You've already forked DataMate
Compare commits
48 Commits
329382db47
...
lsf
| Author | SHA1 | Date | |
|---|---|---|---|
| 75f9b95093 | |||
| ca37bc5a3b | |||
| e62a8369d4 | |||
| 6de41f1a5b | |||
| 24e59b87f2 | |||
| 1b2ed5335e | |||
| a5d8997c22 | |||
| e9e4cf3b1c | |||
| 9800517378 | |||
| 3a9afe3480 | |||
| afcb8783aa | |||
| 9b6ff59a11 | |||
| 39338df808 | |||
| 0ed7dcbee7 | |||
| 7abdafc338 | |||
| cca463e7d1 | |||
| 20446bf57d | |||
| 444f8cd015 | |||
| f12e4abd83 | |||
| 42069f82b3 | |||
| 74daed1c25 | |||
| 75db6daeb5 | |||
| ebb4548ca5 | |||
| 37b478a052 | |||
| a260134d7c | |||
| 8b1ab8ff36 | |||
| 910251e898 | |||
| 0e0782a452 | |||
| 5a553ddde3 | |||
| 8f21798d57 | |||
| f707ce9dae | |||
| 9988ff00f5 | |||
| 2fbfefdb91 | |||
| dc490f03be | |||
| 49f99527cc | |||
| 06a7cd9abd | |||
| ea7ca5474e | |||
| 8ffa131fad | |||
| 807c2289e2 | |||
| 7d5a809772 | |||
| 2f8645a011 | |||
| 71f8f7d1c3 | |||
| 78624915b7 | |||
| 2f49fc4199 | |||
| 9efc07935f | |||
| 7264e111ae | |||
| 3dd4035005 | |||
| 36b410ba7b |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -189,4 +189,4 @@ Thumbs.db
|
||||
*.sublime-workspace
|
||||
|
||||
# Milvus
|
||||
deployment/docker/milvus/volumes/
|
||||
deployment/docker/milvus/volumes/
|
||||
|
||||
54
Makefile
54
Makefile
@@ -76,6 +76,12 @@ help:
|
||||
@echo " make download SAVE=true PLATFORM=linux/arm64 Save ARM64 images"
|
||||
@echo " make load-images Load all downloaded images from dist/"
|
||||
@echo ""
|
||||
@echo "Neo4j Commands:"
|
||||
@echo " make neo4j-up Start Neo4j graph database"
|
||||
@echo " make neo4j-down Stop Neo4j graph database"
|
||||
@echo " make neo4j-logs View Neo4j logs"
|
||||
@echo " make neo4j-shell Open Neo4j Cypher shell"
|
||||
@echo ""
|
||||
@echo "Utility Commands:"
|
||||
@echo " make create-namespace Create Kubernetes namespace"
|
||||
@echo " make help Show this help message"
|
||||
@@ -205,8 +211,9 @@ endif
|
||||
.PHONY: install
|
||||
install:
|
||||
ifeq ($(origin INSTALLER), undefined)
|
||||
$(call prompt-installer,datamate-$$INSTALLER-install milvus-$$INSTALLER-install)
|
||||
$(call prompt-installer,neo4j-$$INSTALLER-install datamate-$$INSTALLER-install milvus-$$INSTALLER-install)
|
||||
else
|
||||
$(MAKE) neo4j-$(INSTALLER)-install
|
||||
$(MAKE) datamate-$(INSTALLER)-install
|
||||
$(MAKE) milvus-$(INSTALLER)-install
|
||||
endif
|
||||
@@ -222,7 +229,7 @@ endif
|
||||
.PHONY: uninstall
|
||||
uninstall:
|
||||
ifeq ($(origin INSTALLER), undefined)
|
||||
$(call prompt-uninstaller,label-studio-$$INSTALLER-uninstall milvus-$$INSTALLER-uninstall deer-flow-$$INSTALLER-uninstall datamate-$$INSTALLER-uninstall)
|
||||
$(call prompt-uninstaller,label-studio-$$INSTALLER-uninstall milvus-$$INSTALLER-uninstall neo4j-$$INSTALLER-uninstall deer-flow-$$INSTALLER-uninstall datamate-$$INSTALLER-uninstall)
|
||||
else
|
||||
@if [ "$(INSTALLER)" = "docker" ]; then \
|
||||
echo "Delete volumes? (This will remove all data)"; \
|
||||
@@ -234,6 +241,7 @@ else
|
||||
fi
|
||||
@$(MAKE) label-studio-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE; \
|
||||
$(MAKE) milvus-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE; \
|
||||
$(MAKE) neo4j-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE; \
|
||||
$(MAKE) deer-flow-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE; \
|
||||
$(MAKE) datamate-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE
|
||||
endif
|
||||
@@ -241,7 +249,7 @@ endif
|
||||
# ========== Docker Install/Uninstall Targets ==========
|
||||
|
||||
# Valid service targets for docker install/uninstall
|
||||
VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" milvus "label-studio" "data-juicer" dj
|
||||
VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" milvus neo4j "label-studio" "data-juicer" dj
|
||||
|
||||
# Generic docker service install target
|
||||
.PHONY: %-docker-install
|
||||
@@ -266,6 +274,8 @@ VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" mi
|
||||
REGISTRY=$(REGISTRY) docker compose -f deployment/docker/deer-flow/docker-compose.yml up -d; \
|
||||
elif [ "$*" = "milvus" ]; then \
|
||||
docker compose -f deployment/docker/milvus/docker-compose.yml up -d; \
|
||||
elif [ "$*" = "neo4j" ]; then \
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml up -d; \
|
||||
elif [ "$*" = "data-juicer" ] || [ "$*" = "dj" ]; then \
|
||||
REGISTRY=$(REGISTRY) && docker compose -f deployment/docker/datamate/docker-compose.yml up -d datamate-data-juicer; \
|
||||
else \
|
||||
@@ -305,6 +315,12 @@ VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" mi
|
||||
else \
|
||||
docker compose -f deployment/docker/milvus/docker-compose.yml down; \
|
||||
fi; \
|
||||
elif [ "$*" = "neo4j" ]; then \
|
||||
if [ "$(DELETE_VOLUMES_CHOICE)" = "1" ]; then \
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml down -v; \
|
||||
else \
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml down; \
|
||||
fi; \
|
||||
elif [ "$*" = "data-juicer" ] || [ "$*" = "dj" ]; then \
|
||||
$(call docker-compose-service,datamate-data-juicer,down,deployment/docker/datamate); \
|
||||
else \
|
||||
@@ -314,7 +330,7 @@ VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" mi
|
||||
# ========== Kubernetes Install/Uninstall Targets ==========
|
||||
|
||||
# Valid k8s targets
|
||||
VALID_K8S_TARGETS := mineru datamate deer-flow milvus label-studio data-juicer dj
|
||||
VALID_K8S_TARGETS := mineru datamate deer-flow milvus neo4j label-studio data-juicer dj
|
||||
|
||||
# Generic k8s install target
|
||||
.PHONY: %-k8s-install
|
||||
@@ -327,7 +343,9 @@ VALID_K8S_TARGETS := mineru datamate deer-flow milvus label-studio data-juicer d
|
||||
done; \
|
||||
exit 1; \
|
||||
fi
|
||||
@if [ "$*" = "label-studio" ]; then \
|
||||
@if [ "$*" = "neo4j" ]; then \
|
||||
echo "Skipping Neo4j: no Helm chart available. Use 'make neo4j-docker-install' or provide an external Neo4j instance."; \
|
||||
elif [ "$*" = "label-studio" ]; then \
|
||||
helm upgrade label-studio deployment/helm/label-studio/ -n $(NAMESPACE) --install; \
|
||||
elif [ "$*" = "mineru" ]; then \
|
||||
kubectl apply -f deployment/kubernetes/mineru/deploy.yaml -n $(NAMESPACE); \
|
||||
@@ -356,7 +374,9 @@ VALID_K8S_TARGETS := mineru datamate deer-flow milvus label-studio data-juicer d
|
||||
done; \
|
||||
exit 1; \
|
||||
fi
|
||||
@if [ "$*" = "mineru" ]; then \
|
||||
@if [ "$*" = "neo4j" ]; then \
|
||||
echo "Skipping Neo4j: no Helm chart available. Use 'make neo4j-docker-uninstall' or manage your external Neo4j instance."; \
|
||||
elif [ "$*" = "mineru" ]; then \
|
||||
kubectl delete -f deployment/kubernetes/mineru/deploy.yaml -n $(NAMESPACE); \
|
||||
elif [ "$*" = "datamate" ]; then \
|
||||
helm uninstall datamate -n $(NAMESPACE) --ignore-not-found; \
|
||||
@@ -498,3 +518,25 @@ load-images:
|
||||
else \
|
||||
echo "Successfully loaded $$count image(s)"; \
|
||||
fi
|
||||
|
||||
# ========== Neo4j Targets ==========
|
||||
|
||||
.PHONY: neo4j-up
|
||||
neo4j-up:
|
||||
@echo "Starting Neo4j graph database..."
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml up -d
|
||||
@echo "Neo4j Browser: http://localhost:7474"
|
||||
@echo "Bolt URI: bolt://localhost:7687"
|
||||
|
||||
.PHONY: neo4j-down
|
||||
neo4j-down:
|
||||
@echo "Stopping Neo4j graph database..."
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml down
|
||||
|
||||
.PHONY: neo4j-logs
|
||||
neo4j-logs:
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml logs -f
|
||||
|
||||
.PHONY: neo4j-shell
|
||||
neo4j-shell:
|
||||
docker exec -it datamate-neo4j cypher-shell -u neo4j -p "$${NEO4J_PASSWORD:-datamate123}"
|
||||
|
||||
@@ -110,9 +110,9 @@ Thank you for your interest in this project! We warmly welcome contributions fro
|
||||
bug reports, suggesting new features, or directly participating in code development, all forms of help make the project
|
||||
better.
|
||||
|
||||
• 📮 [GitHub Issues](../../issues): Submit bugs or feature suggestions.
|
||||
• 📮 [GitHub Issues](https://github.com/ModelEngine-Group/DataMate/issues): Submit bugs or feature suggestions.
|
||||
|
||||
• 🔧 [GitHub Pull Requests](../../pulls): Contribute code improvements.
|
||||
• 🔧 [GitHub Pull Requests](https://github.com/ModelEngine-Group/DataMate/pulls): Contribute code improvements.
|
||||
|
||||
## 📄 License
|
||||
|
||||
|
||||
@@ -37,6 +37,14 @@ public class ApiGatewayApplication {
|
||||
.route("data-collection", r -> r.path("/api/data-collection/**")
|
||||
.uri("http://datamate-backend-python:18000"))
|
||||
|
||||
// 知识图谱抽取服务路由
|
||||
.route("kg-extraction", r -> r.path("/api/kg/**")
|
||||
.uri("http://datamate-backend-python:18000"))
|
||||
|
||||
// GraphRAG 融合查询服务路由
|
||||
.route("graphrag", r -> r.path("/api/graphrag/**")
|
||||
.uri("http://datamate-backend-python:18000"))
|
||||
|
||||
.route("deer-flow-frontend", r -> r.path("/chat/**")
|
||||
.uri("http://deer-flow-frontend:3000"))
|
||||
|
||||
|
||||
@@ -83,11 +83,13 @@ public class UserContextFilter implements GlobalFilter, Ordered {
|
||||
String userId = String.valueOf(claims.get("userId"));
|
||||
String username = claims.getSubject();
|
||||
List<String> roles = gatewayJwtUtils.getStringListClaim(claims, "roles");
|
||||
List<String> permissions = gatewayJwtUtils.getStringListClaim(claims, "permissions");
|
||||
|
||||
ServerHttpRequest mutatedRequest = request.mutate()
|
||||
.header("X-User-Id", userId)
|
||||
.header("X-User-Name", username)
|
||||
.header("X-User-Roles", String.join(",", roles))
|
||||
.header("X-User-Permissions", String.join(",", permissions))
|
||||
.build();
|
||||
return chain.filter(exchange.mutate().request(mutatedRequest).build());
|
||||
}
|
||||
|
||||
@@ -48,6 +48,9 @@ public class PermissionRuleMatcher {
|
||||
addModuleRules(permissionRules, "/api/operator-market/**", "module:operator-market:read", "module:operator-market:write");
|
||||
addModuleRules(permissionRules, "/api/orchestration/**", "module:orchestration:read", "module:orchestration:write");
|
||||
addModuleRules(permissionRules, "/api/content-generation/**", "module:content-generation:use", "module:content-generation:use");
|
||||
addModuleRules(permissionRules, "/api/task-meta/**", "module:task-coordination:read", "module:task-coordination:write");
|
||||
addModuleRules(permissionRules, "/api/knowledge-graph/**", "module:knowledge-graph:read", "module:knowledge-graph:write");
|
||||
addModuleRules(permissionRules, "/api/graphrag/**", "module:knowledge-base:read", "module:knowledge-base:write");
|
||||
|
||||
permissionRules.add(new PermissionRule(READ_METHODS, "/api/auth/users/**", "system:user:manage"));
|
||||
permissionRules.add(new PermissionRule(WRITE_METHODS, "/api/auth/users/**", "system:user:manage"));
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
package com.datamate.datamanagement.application;
|
||||
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessAssert;
|
||||
import com.datamate.common.infrastructure.exception.CommonErrorCode;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeStatusType;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeItemDirectory;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
@@ -32,17 +34,19 @@ public class KnowledgeDirectoryApplicationService {
|
||||
private final KnowledgeItemDirectoryRepository knowledgeItemDirectoryRepository;
|
||||
private final KnowledgeItemRepository knowledgeItemRepository;
|
||||
private final KnowledgeSetRepository knowledgeSetRepository;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<KnowledgeItemDirectory> getKnowledgeDirectories(String setId, KnowledgeDirectoryQuery query) {
|
||||
BusinessAssert.notNull(query, CommonErrorCode.PARAM_ERROR);
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
query.setSetId(setId);
|
||||
return knowledgeItemDirectoryRepository.findByCriteria(query);
|
||||
}
|
||||
|
||||
public KnowledgeItemDirectory createKnowledgeDirectory(String setId, CreateKnowledgeDirectoryRequest request) {
|
||||
BusinessAssert.notNull(request, CommonErrorCode.PARAM_ERROR);
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -71,7 +75,7 @@ public class KnowledgeDirectoryApplicationService {
|
||||
}
|
||||
|
||||
public void deleteKnowledgeDirectory(String setId, String relativePath) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -88,6 +92,15 @@ public class KnowledgeDirectoryApplicationService {
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
private KnowledgeSet requireAccessibleKnowledgeSet(String setId) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(knowledgeSet.getSensitivity())) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
private boolean isReadOnlyStatus(KnowledgeStatusType status) {
|
||||
return status == KnowledgeStatusType.ARCHIVED || status == KnowledgeStatusType.DEPRECATED;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package com.datamate.datamanagement.application;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessAssert;
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.infrastructure.exception.CommonErrorCode;
|
||||
@@ -12,11 +13,11 @@ import com.datamate.datamanagement.common.enums.KnowledgeSourceType;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeStatusType;
|
||||
import com.datamate.datamanagement.domain.model.dataset.Dataset;
|
||||
import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
|
||||
import com.datamate.datamanagement.domain.model.dataset.Tag;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeItem;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
import com.datamate.datamanagement.infrastructure.config.DataManagementProperties;
|
||||
import com.datamate.datamanagement.infrastructure.exception.DataManagementErrorCode;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.mapper.TagMapper;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetFileRepository;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetRepository;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.KnowledgeItemRepository;
|
||||
@@ -30,6 +31,7 @@ import com.datamate.datamanagement.interfaces.dto.KnowledgeItemResponse;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemSearchQuery;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemSearchResponse;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeManagementStatisticsResponse;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeSetPagingQuery;
|
||||
import com.datamate.datamanagement.interfaces.dto.ReplaceKnowledgeItemFileRequest;
|
||||
import com.datamate.datamanagement.interfaces.dto.UpdateKnowledgeItemRequest;
|
||||
import com.datamate.datamanagement.interfaces.dto.UploadKnowledgeItemsRequest;
|
||||
@@ -56,12 +58,15 @@ import java.nio.file.Paths;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* 知识条目应用服务
|
||||
@@ -88,11 +93,11 @@ public class KnowledgeItemApplicationService {
|
||||
private final DatasetRepository datasetRepository;
|
||||
private final DatasetFileRepository datasetFileRepository;
|
||||
private final DataManagementProperties dataManagementProperties;
|
||||
private final TagMapper tagMapper;
|
||||
private final KnowledgeItemPreviewService knowledgeItemPreviewService;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
|
||||
public KnowledgeItem createKnowledgeItem(String setId, CreateKnowledgeItemRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -112,7 +117,7 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public List<KnowledgeItem> uploadKnowledgeItems(String setId, UploadKnowledgeItemsRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -176,7 +181,7 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public KnowledgeItem updateKnowledgeItem(String setId, String itemId, UpdateKnowledgeItemRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -199,6 +204,7 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public void deleteKnowledgeItem(String setId, String itemId) {
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -209,6 +215,7 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public void deleteKnowledgeItems(String setId, DeleteKnowledgeItemsRequest request) {
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.notNull(request, CommonErrorCode.PARAM_ERROR);
|
||||
List<String> ids = request.getIds();
|
||||
BusinessAssert.isTrue(CollectionUtils.isNotEmpty(ids), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -231,6 +238,7 @@ public class KnowledgeItemApplicationService {
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public KnowledgeItem getKnowledgeItem(String setId, String itemId) {
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -239,6 +247,7 @@ public class KnowledgeItemApplicationService {
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public PagedResponse<KnowledgeItemResponse> getKnowledgeItems(String setId, KnowledgeItemPagingQuery query) {
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
query.setSetId(setId);
|
||||
IPage<KnowledgeItem> page = new Page<>(query.getPage(), query.getSize());
|
||||
page = knowledgeItemRepository.findByCriteria(page, query);
|
||||
@@ -248,19 +257,58 @@ public class KnowledgeItemApplicationService {
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public KnowledgeManagementStatisticsResponse getKnowledgeManagementStatistics() {
|
||||
boolean excludeConfidential = !resourceAccessService.canViewConfidential();
|
||||
String ownerFilterUserId = resourceAccessService.resolveOwnerFilterUserId();
|
||||
KnowledgeSetPagingQuery baseQuery = new KnowledgeSetPagingQuery();
|
||||
KnowledgeManagementStatisticsResponse response = new KnowledgeManagementStatisticsResponse();
|
||||
response.setTotalKnowledgeSets(knowledgeSetRepository.count());
|
||||
|
||||
long totalFiles = knowledgeItemRepository.countBySourceTypes(List.of(
|
||||
long totalSets = knowledgeSetRepository.countByCriteria(baseQuery, ownerFilterUserId, excludeConfidential);
|
||||
response.setTotalKnowledgeSets(totalSets);
|
||||
|
||||
List<String> accessibleSetIds = knowledgeSetRepository.listSetIdsByCriteria(baseQuery, ownerFilterUserId, excludeConfidential);
|
||||
if (CollectionUtils.isEmpty(accessibleSetIds)) {
|
||||
response.setTotalFiles(0L);
|
||||
response.setTotalSize(0L);
|
||||
response.setTotalTags(0L);
|
||||
return response;
|
||||
}
|
||||
List<KnowledgeSet> accessibleSets = knowledgeSetRepository.listByIds(accessibleSetIds);
|
||||
if (CollectionUtils.isEmpty(accessibleSets)) {
|
||||
response.setTotalFiles(0L);
|
||||
response.setTotalSize(0L);
|
||||
response.setTotalTags(0L);
|
||||
return response;
|
||||
}
|
||||
|
||||
List<String> normalizedSetIds = accessibleSets.stream()
|
||||
.map(KnowledgeSet::getId)
|
||||
.filter(StringUtils::isNotBlank)
|
||||
.toList();
|
||||
if (CollectionUtils.isEmpty(normalizedSetIds)) {
|
||||
response.setTotalFiles(0L);
|
||||
response.setTotalSize(0L);
|
||||
response.setTotalTags(0L);
|
||||
return response;
|
||||
}
|
||||
|
||||
long totalFiles = knowledgeItemRepository.countBySourceTypesAndSetIds(List.of(
|
||||
KnowledgeSourceType.DATASET_FILE,
|
||||
KnowledgeSourceType.FILE_UPLOAD
|
||||
));
|
||||
), normalizedSetIds);
|
||||
response.setTotalFiles(totalFiles);
|
||||
|
||||
long datasetFileSize = safeLong(knowledgeItemRepository.sumDatasetFileSize());
|
||||
long uploadFileSize = calculateUploadFileTotalSize();
|
||||
long datasetFileSize = safeLong(knowledgeItemRepository.sumDatasetFileSizeBySetIds(normalizedSetIds));
|
||||
long uploadFileSize = calculateUploadFileTotalSize(normalizedSetIds);
|
||||
response.setTotalSize(datasetFileSize + uploadFileSize);
|
||||
response.setTotalTags(safeLong(tagMapper.countKnowledgeSetTags()));
|
||||
|
||||
long totalTags = accessibleSets.stream()
|
||||
.filter(Objects::nonNull)
|
||||
.flatMap(set -> CollectionUtils.isEmpty(set.getTags()) ? Collections.<Tag>emptyList().stream() : set.getTags().stream())
|
||||
.map(tag -> StringUtils.trimToNull(tag == null ? null : tag.getName()))
|
||||
.filter(Objects::nonNull)
|
||||
.collect(Collectors.toCollection(HashSet::new))
|
||||
.size();
|
||||
response.setTotalTags(totalTags);
|
||||
|
||||
return response;
|
||||
}
|
||||
@@ -271,8 +319,9 @@ public class KnowledgeItemApplicationService {
|
||||
String keyword = StringUtils.trimToEmpty(query.getKeyword());
|
||||
BusinessAssert.isTrue(StringUtils.isNotBlank(keyword), CommonErrorCode.PARAM_ERROR);
|
||||
|
||||
boolean excludeConfidential = !resourceAccessService.canViewConfidential();
|
||||
IPage<KnowledgeItemSearchResponse> page = new Page<>(query.getPage(), query.getSize());
|
||||
IPage<KnowledgeItemSearchResponse> result = knowledgeItemRepository.searchFileItems(page, keyword);
|
||||
IPage<KnowledgeItemSearchResponse> result = knowledgeItemRepository.searchFileItems(page, keyword, excludeConfidential);
|
||||
List<KnowledgeItemSearchResponse> responses = result.getRecords()
|
||||
.stream()
|
||||
.map(this::normalizeSearchResponse)
|
||||
@@ -281,7 +330,7 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public List<KnowledgeItem> importKnowledgeItems(String setId, ImportKnowledgeItemsRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
Dataset dataset = datasetRepository.getById(request.getDatasetId());
|
||||
@@ -318,7 +367,7 @@ public class KnowledgeItemApplicationService {
|
||||
@Transactional(readOnly = true)
|
||||
public void exportKnowledgeItems(String setId, HttpServletResponse response) {
|
||||
BusinessAssert.notNull(response, CommonErrorCode.PARAM_ERROR);
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
List<KnowledgeItem> items = knowledgeItemRepository.findAllBySetId(setId);
|
||||
|
||||
response.setContentType(EXPORT_CONTENT_TYPE);
|
||||
@@ -347,6 +396,7 @@ public class KnowledgeItemApplicationService {
|
||||
@Transactional(readOnly = true)
|
||||
public void downloadKnowledgeItemFile(String setId, String itemId, HttpServletResponse response) {
|
||||
BusinessAssert.notNull(response, CommonErrorCode.PARAM_ERROR);
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -380,6 +430,7 @@ public class KnowledgeItemApplicationService {
|
||||
@Transactional(readOnly = true)
|
||||
public void previewKnowledgeItemFile(String setId, String itemId, HttpServletResponse response) {
|
||||
BusinessAssert.notNull(response, CommonErrorCode.PARAM_ERROR);
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -441,7 +492,7 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public KnowledgeItem replaceKnowledgeItemFile(String setId, String itemId, ReplaceKnowledgeItemFileRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -655,8 +706,8 @@ public class KnowledgeItemApplicationService {
|
||||
return item;
|
||||
}
|
||||
|
||||
private long calculateUploadFileTotalSize() {
|
||||
List<KnowledgeItem> items = knowledgeItemRepository.findFileUploadItems();
|
||||
private long calculateUploadFileTotalSize(List<String> setIds) {
|
||||
List<KnowledgeItem> items = knowledgeItemRepository.findFileUploadItemsBySetIds(setIds);
|
||||
if (CollectionUtils.isEmpty(items)) {
|
||||
return 0L;
|
||||
}
|
||||
@@ -846,6 +897,18 @@ public class KnowledgeItemApplicationService {
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
/**
|
||||
* 校验当前用户是否可访问指定知识集(含保密权限检查)
|
||||
*/
|
||||
private KnowledgeSet requireAccessibleKnowledgeSet(String setId) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(knowledgeSet.getSensitivity())) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
private String buildExportFileName(String setId) {
|
||||
return EXPORT_FILE_PREFIX + setId + "_" + LocalDateTime.now().format(EXPORT_TIME_FORMATTER) + EXPORT_FILE_SUFFIX;
|
||||
}
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
package com.datamate.datamanagement.application;
|
||||
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessAssert;
|
||||
import com.datamate.common.infrastructure.exception.CommonErrorCode;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeContentType;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeItemPreviewStatus;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeSourceType;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeItem;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
import com.datamate.datamanagement.infrastructure.config.DataManagementProperties;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.KnowledgeItemRepository;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.KnowledgeSetRepository;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemPreviewStatusResponse;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
@@ -38,8 +42,10 @@ public class KnowledgeItemPreviewService {
|
||||
private static final DateTimeFormatter PREVIEW_TIME_FORMATTER = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
|
||||
|
||||
private final KnowledgeItemRepository knowledgeItemRepository;
|
||||
private final KnowledgeSetRepository knowledgeSetRepository;
|
||||
private final DataManagementProperties dataManagementProperties;
|
||||
private final KnowledgeItemPreviewAsyncService knowledgeItemPreviewAsyncService;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
private final ObjectMapper objectMapper = new ObjectMapper();
|
||||
|
||||
public KnowledgeItemPreviewStatusResponse getPreviewStatus(String setId, String itemId) {
|
||||
@@ -138,6 +144,14 @@ public class KnowledgeItemPreviewService {
|
||||
private KnowledgeItem requireKnowledgeItem(String setId, String itemId) {
|
||||
BusinessAssert.isTrue(StringUtils.isNotBlank(setId), CommonErrorCode.PARAM_ERROR);
|
||||
BusinessAssert.isTrue(StringUtils.isNotBlank(itemId), CommonErrorCode.PARAM_ERROR);
|
||||
|
||||
KnowledgeSet knowledgeSet = knowledgeSetRepository.getById(setId);
|
||||
BusinessAssert.notNull(knowledgeSet, CommonErrorCode.PARAM_ERROR);
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(knowledgeSet.getSensitivity())) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, CommonErrorCode.PARAM_ERROR);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
|
||||
@@ -5,6 +5,7 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessAssert;
|
||||
import com.datamate.common.infrastructure.exception.CommonErrorCode;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeStatusType;
|
||||
import com.datamate.datamanagement.domain.model.dataset.Tag;
|
||||
@@ -46,9 +47,11 @@ public class KnowledgeSetApplicationService {
|
||||
public KnowledgeSet createKnowledgeSet(CreateKnowledgeSetRequest request) {
|
||||
BusinessAssert.isTrue(knowledgeSetRepository.findByName(request.getName()) == null,
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_ALREADY_EXISTS);
|
||||
assertCanUseSensitivity(request.getSensitivity());
|
||||
|
||||
KnowledgeSet knowledgeSet = KnowledgeConverter.INSTANCE.convertToKnowledgeSet(request);
|
||||
knowledgeSet.setId(UUID.randomUUID().toString());
|
||||
knowledgeSet.setSensitivity(normalizeSensitivity(knowledgeSet.getSensitivity()));
|
||||
if (knowledgeSet.getStatus() == null) {
|
||||
knowledgeSet.setStatus(KnowledgeStatusType.DRAFT);
|
||||
}
|
||||
@@ -67,6 +70,7 @@ public class KnowledgeSetApplicationService {
|
||||
KnowledgeSet knowledgeSet = knowledgeSetRepository.getById(setId);
|
||||
BusinessAssert.notNull(knowledgeSet, DataManagementErrorCode.KNOWLEDGE_SET_NOT_FOUND);
|
||||
resourceAccessService.assertOwnerAccess(knowledgeSet.getCreatedBy());
|
||||
assertConfidentialAccess(knowledgeSet);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -106,7 +110,8 @@ public class KnowledgeSetApplicationService {
|
||||
knowledgeSet.setSourceType(request.getSourceType());
|
||||
}
|
||||
if (request.getSensitivity() != null) {
|
||||
knowledgeSet.setSensitivity(request.getSensitivity());
|
||||
assertCanUseSensitivity(request.getSensitivity());
|
||||
knowledgeSet.setSensitivity(normalizeSensitivity(request.getSensitivity()));
|
||||
}
|
||||
if (request.getMetadata() != null) {
|
||||
knowledgeSet.setMetadata(request.getMetadata());
|
||||
@@ -123,6 +128,7 @@ public class KnowledgeSetApplicationService {
|
||||
KnowledgeSet knowledgeSet = knowledgeSetRepository.getById(setId);
|
||||
BusinessAssert.notNull(knowledgeSet, DataManagementErrorCode.KNOWLEDGE_SET_NOT_FOUND);
|
||||
resourceAccessService.assertOwnerAccess(knowledgeSet.getCreatedBy());
|
||||
assertConfidentialAccess(knowledgeSet);
|
||||
knowledgeSetRepository.removeById(setId);
|
||||
}
|
||||
|
||||
@@ -131,6 +137,7 @@ public class KnowledgeSetApplicationService {
|
||||
KnowledgeSet knowledgeSet = knowledgeSetRepository.getById(setId);
|
||||
BusinessAssert.notNull(knowledgeSet, DataManagementErrorCode.KNOWLEDGE_SET_NOT_FOUND);
|
||||
resourceAccessService.assertOwnerAccess(knowledgeSet.getCreatedBy());
|
||||
assertConfidentialAccess(knowledgeSet);
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
@@ -138,11 +145,33 @@ public class KnowledgeSetApplicationService {
|
||||
public PagedResponse<KnowledgeSetResponse> getKnowledgeSets(KnowledgeSetPagingQuery query) {
|
||||
IPage<KnowledgeSet> page = new Page<>(query.getPage(), query.getSize());
|
||||
String ownerFilterUserId = resourceAccessService.resolveOwnerFilterUserId();
|
||||
page = knowledgeSetRepository.findByCriteria(page, query, ownerFilterUserId);
|
||||
boolean excludeConfidential = !resourceAccessService.canViewConfidential();
|
||||
page = knowledgeSetRepository.findByCriteria(page, query, ownerFilterUserId, excludeConfidential);
|
||||
List<KnowledgeSetResponse> responses = KnowledgeConverter.INSTANCE.convertSetResponses(page.getRecords());
|
||||
return PagedResponse.of(responses, page.getCurrent(), page.getTotal(), page.getPages());
|
||||
}
|
||||
|
||||
private void assertConfidentialAccess(KnowledgeSet knowledgeSet) {
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(knowledgeSet.getSensitivity())) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
}
|
||||
|
||||
private void assertCanUseSensitivity(String sensitivity) {
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(sensitivity)) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
}
|
||||
|
||||
private String normalizeSensitivity(String sensitivity) {
|
||||
if (!StringUtils.hasText(sensitivity)) {
|
||||
return null;
|
||||
}
|
||||
return sensitivity.trim().toUpperCase();
|
||||
}
|
||||
|
||||
private boolean isReadOnlyStatus(KnowledgeStatusType status) {
|
||||
return status == KnowledgeStatusType.ARCHIVED || status == KnowledgeStatusType.DEPRECATED;
|
||||
}
|
||||
|
||||
@@ -21,8 +21,8 @@ public class DataManagementConfig {
|
||||
/**
|
||||
* 缓存管理器
|
||||
*/
|
||||
@Bean
|
||||
public CacheManager cacheManager() {
|
||||
@Bean("dataManagementCacheManager")
|
||||
public CacheManager dataManagementCacheManager() {
|
||||
return new ConcurrentMapCacheManager("datasets", "datasetFiles", "tags");
|
||||
}
|
||||
|
||||
|
||||
@@ -8,9 +8,12 @@ import org.apache.ibatis.annotations.Mapper;
|
||||
import org.apache.ibatis.annotations.Param;
|
||||
import org.apache.ibatis.annotations.Select;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@Mapper
|
||||
public interface KnowledgeItemMapper extends BaseMapper<KnowledgeItem> {
|
||||
@Select("""
|
||||
<script>
|
||||
SELECT
|
||||
ki.id AS id,
|
||||
ki.set_id AS setId,
|
||||
@@ -34,19 +37,32 @@ public interface KnowledgeItemMapper extends BaseMapper<KnowledgeItem> {
|
||||
FROM t_dm_knowledge_items ki
|
||||
LEFT JOIN t_dm_knowledge_sets ks ON ki.set_id = ks.id
|
||||
LEFT JOIN t_dm_dataset_files df ON ki.source_file_id = df.id AND ki.source_type = 'DATASET_FILE'
|
||||
WHERE (ki.source_type = 'FILE_UPLOAD' AND (ki.source_file_id LIKE CONCAT('%', #{keyword}, '%')
|
||||
WHERE ((ki.source_type = 'FILE_UPLOAD' AND (ki.source_file_id LIKE CONCAT('%', #{keyword}, '%')
|
||||
OR ki.relative_path LIKE CONCAT('%', #{keyword}, '%')))
|
||||
OR (ki.source_type = 'DATASET_FILE' AND (df.file_name LIKE CONCAT('%', #{keyword}, '%')
|
||||
OR ki.relative_path LIKE CONCAT('%', #{keyword}, '%')))
|
||||
OR ki.relative_path LIKE CONCAT('%', #{keyword}, '%'))))
|
||||
<if test="excludeConfidential">
|
||||
AND (ks.sensitivity IS NULL OR UPPER(TRIM(ks.sensitivity)) != 'CONFIDENTIAL')
|
||||
</if>
|
||||
ORDER BY ki.created_at DESC
|
||||
</script>
|
||||
""")
|
||||
IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, @Param("keyword") String keyword);
|
||||
IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, @Param("keyword") String keyword,
|
||||
@Param("excludeConfidential") boolean excludeConfidential);
|
||||
|
||||
@Select("""
|
||||
<script>
|
||||
SELECT COALESCE(SUM(df.file_size), 0)
|
||||
FROM t_dm_knowledge_items ki
|
||||
LEFT JOIN t_dm_dataset_files df ON ki.source_file_id = df.id
|
||||
WHERE ki.source_type = 'DATASET_FILE'
|
||||
<if test="setIds != null and setIds.size() > 0">
|
||||
AND ki.set_id IN
|
||||
<foreach collection="setIds" item="setId" open="(" separator="," close=")">
|
||||
#{setId}
|
||||
</foreach>
|
||||
</if>
|
||||
</script>
|
||||
""")
|
||||
Long sumDatasetFileSize();
|
||||
Long sumDatasetFileSizeBySetIds(@Param("setIds") List<String> setIds);
|
||||
}
|
||||
|
||||
@@ -2,11 +2,11 @@ package com.datamate.datamanagement.infrastructure.persistence.repository;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.repository.IRepository;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeSourceType;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeItem;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemPagingQuery;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemSearchResponse;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
@@ -19,13 +19,13 @@ public interface KnowledgeItemRepository extends IRepository<KnowledgeItem> {
|
||||
|
||||
List<KnowledgeItem> findAllBySetId(String setId);
|
||||
|
||||
long countBySourceTypes(List<KnowledgeSourceType> sourceTypes);
|
||||
long countBySourceTypesAndSetIds(List<KnowledgeSourceType> sourceTypes, List<String> setIds);
|
||||
|
||||
List<KnowledgeItem> findFileUploadItems();
|
||||
List<KnowledgeItem> findFileUploadItemsBySetIds(List<String> setIds);
|
||||
|
||||
IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, String keyword);
|
||||
IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, String keyword, boolean excludeConfidential);
|
||||
|
||||
Long sumDatasetFileSize();
|
||||
Long sumDatasetFileSizeBySetIds(List<String> setIds);
|
||||
|
||||
boolean existsBySetIdAndRelativePath(String setId, String relativePath);
|
||||
|
||||
|
||||
@@ -5,11 +5,18 @@ import com.baomidou.mybatisplus.extension.repository.IRepository;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeSetPagingQuery;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 知识集仓储接口
|
||||
*/
|
||||
public interface KnowledgeSetRepository extends IRepository<KnowledgeSet> {
|
||||
KnowledgeSet findByName(String name);
|
||||
|
||||
IPage<KnowledgeSet> findByCriteria(IPage<KnowledgeSet> page, KnowledgeSetPagingQuery query, String createdBy);
|
||||
IPage<KnowledgeSet> findByCriteria(IPage<KnowledgeSet> page, KnowledgeSetPagingQuery query, String createdBy,
|
||||
boolean excludeConfidential);
|
||||
|
||||
long countByCriteria(KnowledgeSetPagingQuery query, String createdBy, boolean excludeConfidential);
|
||||
|
||||
List<String> listSetIdsByCriteria(KnowledgeSetPagingQuery query, String createdBy, boolean excludeConfidential);
|
||||
}
|
||||
|
||||
@@ -61,26 +61,37 @@ public class KnowledgeItemRepositoryImpl extends CrudRepository<KnowledgeItemMap
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countBySourceTypes(List<KnowledgeSourceType> sourceTypes) {
|
||||
public long countBySourceTypesAndSetIds(List<KnowledgeSourceType> sourceTypes, List<String> setIds) {
|
||||
if (sourceTypes == null || sourceTypes.isEmpty() || setIds == null || setIds.isEmpty()) {
|
||||
return 0L;
|
||||
}
|
||||
return knowledgeItemMapper.selectCount(new LambdaQueryWrapper<KnowledgeItem>()
|
||||
.in(KnowledgeItem::getSourceType, sourceTypes));
|
||||
.in(KnowledgeItem::getSourceType, sourceTypes)
|
||||
.in(KnowledgeItem::getSetId, setIds));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<KnowledgeItem> findFileUploadItems() {
|
||||
public List<KnowledgeItem> findFileUploadItemsBySetIds(List<String> setIds) {
|
||||
if (setIds == null || setIds.isEmpty()) {
|
||||
return List.of();
|
||||
}
|
||||
return knowledgeItemMapper.selectList(new LambdaQueryWrapper<KnowledgeItem>()
|
||||
.eq(KnowledgeItem::getSourceType, KnowledgeSourceType.FILE_UPLOAD)
|
||||
.select(KnowledgeItem::getId, KnowledgeItem::getContent, KnowledgeItem::getSourceFileId));
|
||||
.in(KnowledgeItem::getSetId, setIds)
|
||||
.select(KnowledgeItem::getId, KnowledgeItem::getSetId, KnowledgeItem::getContent, KnowledgeItem::getSourceFileId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, String keyword) {
|
||||
return knowledgeItemMapper.searchFileItems(page, keyword);
|
||||
public IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, String keyword, boolean excludeConfidential) {
|
||||
return knowledgeItemMapper.searchFileItems(page, keyword, excludeConfidential);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long sumDatasetFileSize() {
|
||||
return knowledgeItemMapper.sumDatasetFileSize();
|
||||
public Long sumDatasetFileSizeBySetIds(List<String> setIds) {
|
||||
if (setIds == null || setIds.isEmpty()) {
|
||||
return 0L;
|
||||
}
|
||||
return knowledgeItemMapper.sumDatasetFileSizeBySetIds(setIds);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -3,6 +3,7 @@ package com.datamate.datamanagement.infrastructure.persistence.repository.impl;
|
||||
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.repository.CrudRepository;
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.mapper.KnowledgeSetMapper;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.KnowledgeSetRepository;
|
||||
@@ -11,6 +12,9 @@ import lombok.RequiredArgsConstructor;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 知识集仓储实现类
|
||||
*/
|
||||
@@ -25,25 +29,62 @@ public class KnowledgeSetRepositoryImpl extends CrudRepository<KnowledgeSetMappe
|
||||
}
|
||||
|
||||
@Override
|
||||
public IPage<KnowledgeSet> findByCriteria(IPage<KnowledgeSet> page, KnowledgeSetPagingQuery query, String createdBy) {
|
||||
public IPage<KnowledgeSet> findByCriteria(IPage<KnowledgeSet> page, KnowledgeSetPagingQuery query, String createdBy,
|
||||
boolean excludeConfidential) {
|
||||
LambdaQueryWrapper<KnowledgeSet> wrapper = buildCriteriaWrapper(query, createdBy, excludeConfidential);
|
||||
wrapper.orderByDesc(KnowledgeSet::getCreatedAt);
|
||||
return knowledgeSetMapper.selectPage(page, wrapper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countByCriteria(KnowledgeSetPagingQuery query, String createdBy, boolean excludeConfidential) {
|
||||
return knowledgeSetMapper.selectCount(buildCriteriaWrapper(query, createdBy, excludeConfidential));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> listSetIdsByCriteria(KnowledgeSetPagingQuery query, String createdBy, boolean excludeConfidential) {
|
||||
LambdaQueryWrapper<KnowledgeSet> wrapper = buildCriteriaWrapper(query, createdBy, excludeConfidential)
|
||||
.select(KnowledgeSet::getId)
|
||||
.orderByDesc(KnowledgeSet::getCreatedAt);
|
||||
List<KnowledgeSet> sets = knowledgeSetMapper.selectList(wrapper);
|
||||
if (sets == null || sets.isEmpty()) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
return sets.stream().map(KnowledgeSet::getId).filter(StringUtils::isNotBlank).toList();
|
||||
}
|
||||
|
||||
private LambdaQueryWrapper<KnowledgeSet> buildCriteriaWrapper(KnowledgeSetPagingQuery query,
|
||||
String createdBy,
|
||||
boolean excludeConfidential) {
|
||||
KnowledgeSetPagingQuery safeQuery = query == null ? new KnowledgeSetPagingQuery() : query;
|
||||
|
||||
LambdaQueryWrapper<KnowledgeSet> wrapper = new LambdaQueryWrapper<KnowledgeSet>()
|
||||
.eq(query.getStatus() != null, KnowledgeSet::getStatus, query.getStatus())
|
||||
.eq(StringUtils.isNotBlank(query.getDomain()), KnowledgeSet::getDomain, query.getDomain())
|
||||
.eq(StringUtils.isNotBlank(query.getBusinessLine()), KnowledgeSet::getBusinessLine, query.getBusinessLine())
|
||||
.eq(StringUtils.isNotBlank(query.getOwner()), KnowledgeSet::getOwner, query.getOwner())
|
||||
.eq(StringUtils.isNotBlank(query.getSensitivity()), KnowledgeSet::getSensitivity, query.getSensitivity())
|
||||
.eq(query.getSourceType() != null, KnowledgeSet::getSourceType, query.getSourceType())
|
||||
.ge(query.getValidFrom() != null, KnowledgeSet::getValidFrom, query.getValidFrom())
|
||||
.le(query.getValidTo() != null, KnowledgeSet::getValidTo, query.getValidTo())
|
||||
.eq(safeQuery.getStatus() != null, KnowledgeSet::getStatus, safeQuery.getStatus())
|
||||
.eq(StringUtils.isNotBlank(safeQuery.getDomain()), KnowledgeSet::getDomain, safeQuery.getDomain())
|
||||
.eq(StringUtils.isNotBlank(safeQuery.getBusinessLine()), KnowledgeSet::getBusinessLine, safeQuery.getBusinessLine())
|
||||
.eq(StringUtils.isNotBlank(safeQuery.getOwner()), KnowledgeSet::getOwner, safeQuery.getOwner())
|
||||
.eq(safeQuery.getSourceType() != null, KnowledgeSet::getSourceType, safeQuery.getSourceType())
|
||||
.ge(safeQuery.getValidFrom() != null, KnowledgeSet::getValidFrom, safeQuery.getValidFrom())
|
||||
.le(safeQuery.getValidTo() != null, KnowledgeSet::getValidTo, safeQuery.getValidTo())
|
||||
.eq(StringUtils.isNotBlank(createdBy), KnowledgeSet::getCreatedBy, createdBy);
|
||||
|
||||
if (StringUtils.isNotBlank(query.getKeyword())) {
|
||||
wrapper.and(w -> w.like(KnowledgeSet::getName, query.getKeyword())
|
||||
.or()
|
||||
.like(KnowledgeSet::getDescription, query.getKeyword()));
|
||||
if (queryHasSensitivity(safeQuery)) {
|
||||
wrapper.apply("UPPER(TRIM(sensitivity)) = {0}", normalizeSensitivity(safeQuery.getSensitivity()));
|
||||
}
|
||||
|
||||
for (String tagName : query.getTags()) {
|
||||
if (excludeConfidential) {
|
||||
wrapper.and(w -> w.isNull(KnowledgeSet::getSensitivity)
|
||||
.or()
|
||||
.apply("UPPER(TRIM(sensitivity)) != {0}", ResourceAccessService.CONFIDENTIAL_SENSITIVITY));
|
||||
}
|
||||
|
||||
if (StringUtils.isNotBlank(safeQuery.getKeyword())) {
|
||||
wrapper.and(w -> w.like(KnowledgeSet::getName, safeQuery.getKeyword())
|
||||
.or()
|
||||
.like(KnowledgeSet::getDescription, safeQuery.getKeyword()));
|
||||
}
|
||||
|
||||
for (String tagName : safeQuery.getTags()) {
|
||||
wrapper.and(w ->
|
||||
w.apply("tags IS NOT NULL " +
|
||||
"AND JSON_VALID(tags) = 1 " +
|
||||
@@ -52,7 +93,15 @@ public class KnowledgeSetRepositoryImpl extends CrudRepository<KnowledgeSetMappe
|
||||
);
|
||||
}
|
||||
|
||||
wrapper.orderByDesc(KnowledgeSet::getCreatedAt);
|
||||
return knowledgeSetMapper.selectPage(page, wrapper);
|
||||
return wrapper;
|
||||
}
|
||||
|
||||
private boolean queryHasSensitivity(KnowledgeSetPagingQuery query) {
|
||||
String normalized = normalizeSensitivity(query.getSensitivity());
|
||||
return StringUtils.isNotBlank(normalized) && !"ALL".equals(normalized);
|
||||
}
|
||||
|
||||
private String normalizeSensitivity(String sensitivity) {
|
||||
return StringUtils.upperCase(StringUtils.trimToNull(sensitivity));
|
||||
}
|
||||
}
|
||||
|
||||
114
backend/services/knowledge-graph-service/pom.xml
Normal file
114
backend/services/knowledge-graph-service/pom.xml
Normal file
@@ -0,0 +1,114 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
|
||||
http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>com.datamate</groupId>
|
||||
<artifactId>services</artifactId>
|
||||
<version>1.0.0-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<artifactId>knowledge-graph-service</artifactId>
|
||||
<name>Knowledge Graph Service</name>
|
||||
<description>知识图谱服务 - 基于Neo4j的实体关系管理与图谱查询</description>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.datamate</groupId>
|
||||
<artifactId>domain-common</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Spring Data Neo4j -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-data-neo4j</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-web</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-data-redis</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.mysql</groupId>
|
||||
<artifactId>mysql-connector-j</artifactId>
|
||||
<version>${mysql.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springdoc</groupId>
|
||||
<artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.openapitools</groupId>
|
||||
<artifactId>jackson-databind-nullable</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>jakarta.validation</groupId>
|
||||
<artifactId>jakarta.validation-api</artifactId>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<arguments>true</arguments>
|
||||
<classifier>exec</classifier>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>3.11.0</version>
|
||||
<configuration>
|
||||
<source>${maven.compiler.source}</source>
|
||||
<target>${maven.compiler.target}</target>
|
||||
<annotationProcessorPaths>
|
||||
<path>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<version>${lombok.version}</version>
|
||||
</path>
|
||||
<path>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok-mapstruct-binding</artifactId>
|
||||
<version>${lombok-mapstruct-binding.version}</version>
|
||||
</path>
|
||||
<path>
|
||||
<groupId>org.mapstruct</groupId>
|
||||
<artifactId>mapstruct-processor</artifactId>
|
||||
<version>${mapstruct.version}</version>
|
||||
</path>
|
||||
</annotationProcessorPaths>
|
||||
<compilerArgs>
|
||||
<arg>-parameters</arg>
|
||||
<arg>-Amapstruct.defaultComponentModel=spring</arg>
|
||||
</compilerArgs>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<version>3.2.5</version>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
@@ -0,0 +1,28 @@
|
||||
package com.datamate.knowledgegraph;
|
||||
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import org.springframework.boot.web.client.RestTemplateBuilder;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.neo4j.repository.config.EnableNeo4jRepositories;
|
||||
import org.springframework.scheduling.annotation.EnableScheduling;
|
||||
import org.springframework.web.client.RestTemplate;
|
||||
|
||||
import java.time.Duration;
|
||||
|
||||
@Configuration
|
||||
@ComponentScan(basePackages = {"com.datamate.knowledgegraph", "com.datamate.common.auth"})
|
||||
@EnableNeo4jRepositories(basePackages = "com.datamate.knowledgegraph.domain.repository")
|
||||
@EnableScheduling
|
||||
public class KnowledgeGraphServiceConfiguration {
|
||||
|
||||
@Bean("kgRestTemplate")
|
||||
public RestTemplate kgRestTemplate(RestTemplateBuilder builder, KnowledgeGraphProperties properties) {
|
||||
KnowledgeGraphProperties.Sync syncConfig = properties.getSync();
|
||||
return builder
|
||||
.connectTimeout(Duration.ofMillis(syncConfig.getConnectTimeout()))
|
||||
.readTimeout(Duration.ofMillis(syncConfig.getReadTimeout()))
|
||||
.build();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,219 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.domain.model.EditReview;
|
||||
import com.datamate.knowledgegraph.domain.repository.EditReviewRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.*;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* 编辑审核业务服务。
|
||||
* <p>
|
||||
* 提供编辑审核的提交、审批、拒绝和查询功能。
|
||||
* 审批通过后自动调用对应的实体/关系 CRUD 服务执行变更。
|
||||
*/
|
||||
@Service
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class EditReviewService {
|
||||
|
||||
private static final long MAX_SKIP = 100_000L;
|
||||
private static final Pattern UUID_PATTERN = Pattern.compile(
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
|
||||
);
|
||||
private static final ObjectMapper MAPPER = new ObjectMapper();
|
||||
|
||||
private final EditReviewRepository reviewRepository;
|
||||
private final GraphEntityService entityService;
|
||||
private final GraphRelationService relationService;
|
||||
|
||||
@Transactional
|
||||
public EditReviewVO submitReview(String graphId, SubmitReviewRequest request, String submittedBy) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
EditReview review = EditReview.builder()
|
||||
.graphId(graphId)
|
||||
.operationType(request.getOperationType())
|
||||
.entityId(request.getEntityId())
|
||||
.relationId(request.getRelationId())
|
||||
.payload(request.getPayload())
|
||||
.status("PENDING")
|
||||
.submittedBy(submittedBy)
|
||||
.build();
|
||||
|
||||
EditReview saved = reviewRepository.save(review);
|
||||
log.info("Review submitted: id={}, graphId={}, type={}, by={}",
|
||||
saved.getId(), graphId, request.getOperationType(), submittedBy);
|
||||
return toVO(saved);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public EditReviewVO approveReview(String graphId, String reviewId, String reviewedBy, String comment) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
EditReview review = reviewRepository.findById(reviewId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.REVIEW_NOT_FOUND));
|
||||
|
||||
if (!"PENDING".equals(review.getStatus())) {
|
||||
throw BusinessException.of(KnowledgeGraphErrorCode.REVIEW_ALREADY_PROCESSED);
|
||||
}
|
||||
|
||||
// Apply the change
|
||||
applyChange(review);
|
||||
|
||||
// Update review status
|
||||
review.setStatus("APPROVED");
|
||||
review.setReviewedBy(reviewedBy);
|
||||
review.setReviewComment(comment);
|
||||
review.setReviewedAt(LocalDateTime.now());
|
||||
reviewRepository.save(review);
|
||||
|
||||
log.info("Review approved: id={}, graphId={}, type={}, by={}",
|
||||
reviewId, graphId, review.getOperationType(), reviewedBy);
|
||||
return toVO(review);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public EditReviewVO rejectReview(String graphId, String reviewId, String reviewedBy, String comment) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
EditReview review = reviewRepository.findById(reviewId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.REVIEW_NOT_FOUND));
|
||||
|
||||
if (!"PENDING".equals(review.getStatus())) {
|
||||
throw BusinessException.of(KnowledgeGraphErrorCode.REVIEW_ALREADY_PROCESSED);
|
||||
}
|
||||
|
||||
review.setStatus("REJECTED");
|
||||
review.setReviewedBy(reviewedBy);
|
||||
review.setReviewComment(comment);
|
||||
review.setReviewedAt(LocalDateTime.now());
|
||||
reviewRepository.save(review);
|
||||
|
||||
log.info("Review rejected: id={}, graphId={}, type={}, by={}",
|
||||
reviewId, graphId, review.getOperationType(), reviewedBy);
|
||||
return toVO(review);
|
||||
}
|
||||
|
||||
public PagedResponse<EditReviewVO> listPendingReviews(String graphId, int page, int size) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
int safePage = Math.max(0, page);
|
||||
int safeSize = Math.max(1, Math.min(size, 200));
|
||||
long skip = (long) safePage * safeSize;
|
||||
if (skip > MAX_SKIP) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
|
||||
}
|
||||
|
||||
List<EditReview> reviews = reviewRepository.findPendingByGraphId(graphId, skip, safeSize);
|
||||
long total = reviewRepository.countPendingByGraphId(graphId);
|
||||
long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
|
||||
|
||||
List<EditReviewVO> content = reviews.stream().map(EditReviewService::toVO).toList();
|
||||
return PagedResponse.of(content, safePage, total, totalPages);
|
||||
}
|
||||
|
||||
public PagedResponse<EditReviewVO> listReviews(String graphId, String status, int page, int size) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
int safePage = Math.max(0, page);
|
||||
int safeSize = Math.max(1, Math.min(size, 200));
|
||||
long skip = (long) safePage * safeSize;
|
||||
if (skip > MAX_SKIP) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
|
||||
}
|
||||
|
||||
List<EditReview> reviews = reviewRepository.findByGraphId(graphId, status, skip, safeSize);
|
||||
long total = reviewRepository.countByGraphId(graphId, status);
|
||||
long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
|
||||
|
||||
List<EditReviewVO> content = reviews.stream().map(EditReviewService::toVO).toList();
|
||||
return PagedResponse.of(content, safePage, total, totalPages);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 执行变更
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
private void applyChange(EditReview review) {
|
||||
String graphId = review.getGraphId();
|
||||
String type = review.getOperationType();
|
||||
|
||||
try {
|
||||
switch (type) {
|
||||
case "CREATE_ENTITY" -> {
|
||||
CreateEntityRequest req = MAPPER.readValue(review.getPayload(), CreateEntityRequest.class);
|
||||
entityService.createEntity(graphId, req);
|
||||
}
|
||||
case "UPDATE_ENTITY" -> {
|
||||
UpdateEntityRequest req = MAPPER.readValue(review.getPayload(), UpdateEntityRequest.class);
|
||||
entityService.updateEntity(graphId, review.getEntityId(), req);
|
||||
}
|
||||
case "DELETE_ENTITY" -> {
|
||||
entityService.deleteEntity(graphId, review.getEntityId());
|
||||
}
|
||||
case "BATCH_DELETE_ENTITY" -> {
|
||||
BatchDeleteRequest req = MAPPER.readValue(review.getPayload(), BatchDeleteRequest.class);
|
||||
entityService.batchDeleteEntities(graphId, req.getIds());
|
||||
}
|
||||
case "CREATE_RELATION" -> {
|
||||
CreateRelationRequest req = MAPPER.readValue(review.getPayload(), CreateRelationRequest.class);
|
||||
relationService.createRelation(graphId, req);
|
||||
}
|
||||
case "UPDATE_RELATION" -> {
|
||||
UpdateRelationRequest req = MAPPER.readValue(review.getPayload(), UpdateRelationRequest.class);
|
||||
relationService.updateRelation(graphId, review.getRelationId(), req);
|
||||
}
|
||||
case "DELETE_RELATION" -> {
|
||||
relationService.deleteRelation(graphId, review.getRelationId());
|
||||
}
|
||||
case "BATCH_DELETE_RELATION" -> {
|
||||
BatchDeleteRequest req = MAPPER.readValue(review.getPayload(), BatchDeleteRequest.class);
|
||||
relationService.batchDeleteRelations(graphId, req.getIds());
|
||||
}
|
||||
default -> throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "未知操作类型: " + type);
|
||||
}
|
||||
} catch (JsonProcessingException e) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "变更载荷解析失败: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 转换
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
private static EditReviewVO toVO(EditReview review) {
|
||||
return EditReviewVO.builder()
|
||||
.id(review.getId())
|
||||
.graphId(review.getGraphId())
|
||||
.operationType(review.getOperationType())
|
||||
.entityId(review.getEntityId())
|
||||
.relationId(review.getRelationId())
|
||||
.payload(review.getPayload())
|
||||
.status(review.getStatus())
|
||||
.submittedBy(review.getSubmittedBy())
|
||||
.reviewedBy(review.getReviewedBy())
|
||||
.reviewComment(review.getReviewComment())
|
||||
.createdAt(review.getCreatedAt())
|
||||
.reviewedAt(review.getReviewedAt())
|
||||
.build();
|
||||
}
|
||||
|
||||
private void validateGraphId(String graphId) {
|
||||
if (graphId == null || !UUID_PATTERN.matcher(graphId).matches()) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "graphId 格式无效");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,216 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.RedisCacheConfig;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.CreateEntityRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.UpdateEntityRequest;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.cache.annotation.Cacheable;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
@Service
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class GraphEntityService {
|
||||
|
||||
/** 分页偏移量上限,防止深翻页导致 Neo4j 性能退化。 */
|
||||
private static final long MAX_SKIP = 100_000L;
|
||||
|
||||
private static final Pattern UUID_PATTERN = Pattern.compile(
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
|
||||
);
|
||||
|
||||
private final GraphEntityRepository entityRepository;
|
||||
private final KnowledgeGraphProperties properties;
|
||||
private final GraphCacheService cacheService;
|
||||
|
||||
@Transactional
|
||||
public GraphEntity createEntity(String graphId, CreateEntityRequest request) {
|
||||
validateGraphId(graphId);
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.name(request.getName())
|
||||
.type(request.getType())
|
||||
.description(request.getDescription())
|
||||
.aliases(request.getAliases())
|
||||
.properties(request.getProperties())
|
||||
.sourceId(request.getSourceId())
|
||||
.sourceType(request.getSourceType())
|
||||
.graphId(graphId)
|
||||
.confidence(request.getConfidence() != null ? request.getConfidence() : 1.0)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.updatedAt(LocalDateTime.now())
|
||||
.build();
|
||||
GraphEntity saved = entityRepository.save(entity);
|
||||
cacheService.evictEntityCaches(graphId, saved.getId());
|
||||
cacheService.evictSearchCaches(graphId);
|
||||
return saved;
|
||||
}
|
||||
|
||||
@Cacheable(value = RedisCacheConfig.CACHE_ENTITIES,
|
||||
key = "T(com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService).cacheKey(#graphId, #entityId)",
|
||||
unless = "#result == null",
|
||||
cacheManager = "knowledgeGraphCacheManager")
|
||||
public GraphEntity getEntity(String graphId, String entityId) {
|
||||
validateGraphId(graphId);
|
||||
return entityRepository.findByIdAndGraphId(entityId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.ENTITY_NOT_FOUND));
|
||||
}
|
||||
|
||||
@Cacheable(value = RedisCacheConfig.CACHE_ENTITIES,
|
||||
key = "T(com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService).cacheKey(#graphId, 'list')",
|
||||
cacheManager = "knowledgeGraphCacheManager")
|
||||
public List<GraphEntity> listEntities(String graphId) {
|
||||
validateGraphId(graphId);
|
||||
return entityRepository.findByGraphId(graphId);
|
||||
}
|
||||
|
||||
public List<GraphEntity> searchEntities(String graphId, String name) {
|
||||
validateGraphId(graphId);
|
||||
return entityRepository.findByGraphIdAndNameContaining(graphId, name);
|
||||
}
|
||||
|
||||
public List<GraphEntity> listEntitiesByType(String graphId, String type) {
|
||||
validateGraphId(graphId);
|
||||
return entityRepository.findByGraphIdAndType(graphId, type);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 分页查询
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
public PagedResponse<GraphEntity> listEntitiesPaged(String graphId, int page, int size) {
|
||||
validateGraphId(graphId);
|
||||
int safePage = Math.max(0, page);
|
||||
int safeSize = Math.max(1, Math.min(size, 200));
|
||||
long skip = (long) safePage * safeSize;
|
||||
if (skip > MAX_SKIP) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
|
||||
}
|
||||
List<GraphEntity> entities = entityRepository.findByGraphIdPaged(graphId, skip, safeSize);
|
||||
long total = entityRepository.countByGraphId(graphId);
|
||||
long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
|
||||
return PagedResponse.of(entities, safePage, total, totalPages);
|
||||
}
|
||||
|
||||
public PagedResponse<GraphEntity> listEntitiesByTypePaged(String graphId, String type, int page, int size) {
|
||||
validateGraphId(graphId);
|
||||
int safePage = Math.max(0, page);
|
||||
int safeSize = Math.max(1, Math.min(size, 200));
|
||||
long skip = (long) safePage * safeSize;
|
||||
if (skip > MAX_SKIP) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
|
||||
}
|
||||
List<GraphEntity> entities = entityRepository.findByGraphIdAndTypePaged(graphId, type, skip, safeSize);
|
||||
long total = entityRepository.countByGraphIdAndType(graphId, type);
|
||||
long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
|
||||
return PagedResponse.of(entities, safePage, total, totalPages);
|
||||
}
|
||||
|
||||
public PagedResponse<GraphEntity> searchEntitiesPaged(String graphId, String keyword, int page, int size) {
|
||||
validateGraphId(graphId);
|
||||
int safePage = Math.max(0, page);
|
||||
int safeSize = Math.max(1, Math.min(size, 200));
|
||||
long skip = (long) safePage * safeSize;
|
||||
if (skip > MAX_SKIP) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
|
||||
}
|
||||
List<GraphEntity> entities = entityRepository.findByGraphIdAndNameContainingPaged(graphId, keyword, skip, safeSize);
|
||||
long total = entityRepository.countByGraphIdAndNameContaining(graphId, keyword);
|
||||
long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
|
||||
return PagedResponse.of(entities, safePage, total, totalPages);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public GraphEntity updateEntity(String graphId, String entityId, UpdateEntityRequest request) {
|
||||
validateGraphId(graphId);
|
||||
GraphEntity entity = getEntity(graphId, entityId);
|
||||
if (request.getName() != null) {
|
||||
entity.setName(request.getName());
|
||||
}
|
||||
if (request.getDescription() != null) {
|
||||
entity.setDescription(request.getDescription());
|
||||
}
|
||||
if (request.getAliases() != null) {
|
||||
entity.setAliases(request.getAliases());
|
||||
}
|
||||
if (request.getProperties() != null) {
|
||||
entity.setProperties(request.getProperties());
|
||||
}
|
||||
if (request.getConfidence() != null) {
|
||||
entity.setConfidence(request.getConfidence());
|
||||
}
|
||||
entity.setUpdatedAt(LocalDateTime.now());
|
||||
GraphEntity saved = entityRepository.save(entity);
|
||||
cacheService.evictEntityCaches(graphId, entityId);
|
||||
cacheService.evictSearchCaches(graphId);
|
||||
return saved;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void deleteEntity(String graphId, String entityId) {
|
||||
validateGraphId(graphId);
|
||||
GraphEntity entity = getEntity(graphId, entityId);
|
||||
entityRepository.delete(entity);
|
||||
cacheService.evictEntityCaches(graphId, entityId);
|
||||
cacheService.evictSearchCaches(graphId);
|
||||
}
|
||||
|
||||
public List<GraphEntity> getNeighbors(String graphId, String entityId, int depth, int limit) {
|
||||
validateGraphId(graphId);
|
||||
int clampedDepth = Math.max(1, Math.min(depth, properties.getMaxDepth()));
|
||||
int clampedLimit = Math.max(1, Math.min(limit, properties.getMaxNodesPerQuery()));
|
||||
return entityRepository.findNeighbors(graphId, entityId, clampedDepth, clampedLimit);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Map<String, Object> batchDeleteEntities(String graphId, List<String> entityIds) {
|
||||
validateGraphId(graphId);
|
||||
int deleted = 0;
|
||||
List<String> failedIds = new ArrayList<>();
|
||||
for (String entityId : entityIds) {
|
||||
try {
|
||||
deleteEntity(graphId, entityId);
|
||||
deleted++;
|
||||
} catch (Exception e) {
|
||||
log.warn("Batch delete: failed to delete entity {}: {}", entityId, e.getMessage());
|
||||
failedIds.add(entityId);
|
||||
}
|
||||
}
|
||||
Map<String, Object> result = Map.of(
|
||||
"deleted", deleted,
|
||||
"total", entityIds.size(),
|
||||
"failedIds", failedIds
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
public long countEntities(String graphId) {
|
||||
validateGraphId(graphId);
|
||||
return entityRepository.countByGraphId(graphId);
|
||||
}
|
||||
|
||||
/**
|
||||
* 校验 graphId 格式(UUID)。
|
||||
* 防止恶意构造的 graphId 注入 Cypher 查询。
|
||||
*/
|
||||
private void validateGraphId(String graphId) {
|
||||
if (graphId == null || !UUID_PATTERN.matcher(graphId).matches()) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "graphId 格式无效");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,990 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.RedisCacheConfig;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.*;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.neo4j.driver.Driver;
|
||||
import org.neo4j.driver.Record;
|
||||
import org.neo4j.driver.Session;
|
||||
import org.neo4j.driver.TransactionConfig;
|
||||
import org.neo4j.driver.Value;
|
||||
import org.neo4j.driver.types.MapAccessor;
|
||||
import org.springframework.cache.annotation.Cacheable;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* 知识图谱查询服务。
|
||||
* <p>
|
||||
* 提供图遍历(N 跳邻居、最短路径、所有路径、子图提取、子图导出)和全文搜索功能。
|
||||
* 使用 {@link Neo4jClient} 执行复杂 Cypher 查询。
|
||||
* <p>
|
||||
* 查询结果根据用户权限进行过滤:
|
||||
* <ul>
|
||||
* <li>管理员:不过滤,看到全部数据</li>
|
||||
* <li>普通用户:按 {@code created_by} 过滤,只看到自己创建的业务实体;
|
||||
* 结构型实体(User、Org、Field 等无 created_by 的实体)对所有用户可见</li>
|
||||
* </ul>
|
||||
*/
|
||||
@Service
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class GraphQueryService {
|
||||
|
||||
private static final String REL_TYPE = "RELATED_TO";
|
||||
private static final long MAX_SKIP = 100_000L;
|
||||
|
||||
/** 结构型实体类型白名单:对所有用户可见,不按 created_by 过滤 */
|
||||
private static final Set<String> STRUCTURAL_ENTITY_TYPES = Set.of("User", "Org", "Field");
|
||||
|
||||
private static final Pattern UUID_PATTERN = Pattern.compile(
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
|
||||
);
|
||||
|
||||
private final Neo4jClient neo4jClient;
|
||||
private final Driver neo4jDriver;
|
||||
private final GraphEntityRepository entityRepository;
|
||||
private final KnowledgeGraphProperties properties;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// N 跳邻居
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Fetches the N-hop neighborhood of an entity: the reachable neighbor nodes
 * plus all edges connecting the returned node set.
 * <p>
 * Results are cached per (graph, entity, depth, limit, caller identity,
 * confidential-visibility flag) so different users never share a cache entry.
 *
 * @param graphId  owning graph id (must be a UUID)
 * @param entityId start entity id
 * @param depth    hop count, clamped to [1, properties.maxDepth]
 * @param limit    maximum number of neighbor nodes returned
 * @return subgraph containing the start node, its neighbors and connecting edges
 */
@Cacheable(value = RedisCacheConfig.CACHE_QUERIES,
        key = "T(com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService).cacheKey(#graphId, #entityId, #depth, #limit, @resourceAccessService.resolveOwnerFilterUserId(), @resourceAccessService.canViewConfidential())",
        cacheManager = "knowledgeGraphCacheManager")
public SubgraphVO getNeighborGraph(String graphId, String entityId, int depth, int limit) {
    validateGraphId(graphId);
    String filterUserId = resolveOwnerFilter();
    boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

    // Verify the start entity exists and the caller may see it.
    GraphEntity startEntity = entityRepository.findByIdAndGraphId(entityId, graphId)
            .orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.ENTITY_NOT_FOUND));

    if (filterUserId != null) {
        assertEntityAccess(startEntity, filterUserId, excludeConfidential);
    }

    int clampedDepth = Math.max(1, Math.min(depth, properties.getMaxDepth()));
    int clampedLimit = Math.max(1, Math.min(limit, properties.getMaxNodesPerQuery()));

    // Path-level permission filter covering EVERY node on the path (consistent
    // with getShortestPath): structural types are always visible, business
    // entities only to their creator; optionally hide CONFIDENTIAL nodes.
    // NOTE(review): relies on flattened node keys `properties.created_by` /
    // `properties.sensitivity` — confirm against the ingestion schema.
    String permFilter = "";
    if (filterUserId != null) {
        StringBuilder pf = new StringBuilder("AND ALL(n IN nodes(p) WHERE ");
        pf.append("(n.type IN ['User', 'Org', 'Field'] OR n.`properties.created_by` = $filterUserId)");
        if (excludeConfidential) {
            pf.append(" AND (toUpper(trim(n.`properties.sensitivity`)) IS NULL OR toUpper(trim(n.`properties.sensitivity`)) <> 'CONFIDENTIAL')");
        }
        pf.append(") ");
        permFilter = pf.toString();
    }

    Map<String, Object> params = new HashMap<>();
    params.put("graphId", graphId);
    params.put("entityId", entityId);
    params.put("limit", clampedLimit);
    if (filterUserId != null) {
        params.put("filterUserId", filterUserId);
    }

    // Query neighbor nodes. The path predicate constrains every intermediate
    // node AND relationship to the same graph, so the permission filter covers
    // the full path, not just the endpoints. Depth is a clamped int, never
    // user-controlled text, so concatenating it into the pattern is safe.
    List<EntitySummaryVO> nodes = neo4jClient
            .query(
                    "MATCH p = (e:Entity {graph_id: $graphId, id: $entityId})" +
                    "-[:" + REL_TYPE + "*1.." + clampedDepth + "]-(neighbor:Entity) " +
                    "WHERE e <> neighbor " +
                    "  AND ALL(n IN nodes(p) WHERE n.graph_id = $graphId) " +
                    "  AND ALL(r IN relationships(p) WHERE r.graph_id = $graphId) " +
                    permFilter +
                    "WITH DISTINCT neighbor LIMIT $limit " +
                    "RETURN neighbor.id AS id, neighbor.name AS name, neighbor.type AS type, " +
                    "neighbor.description AS description"
            )
            .bindAll(params)
            .fetchAs(EntitySummaryVO.class)
            .mappedBy((ts, record) -> EntitySummaryVO.builder()
                    .id(record.get("id").asString(null))
                    .name(record.get("name").asString(null))
                    .type(record.get("type").asString(null))
                    .description(record.get("description").asString(null))
                    .build())
            .all()
            .stream().toList();

    // Collect all node ids (including the start node) for the edge query.
    Set<String> nodeIds = new LinkedHashSet<>();
    nodeIds.add(entityId);
    nodes.forEach(n -> nodeIds.add(n.getId()));

    // Fetch every edge between the collected nodes.
    List<EdgeSummaryVO> edges = queryEdgesBetween(graphId, new ArrayList<>(nodeIds));

    // Prepend the start node to the result node list.
    List<EntitySummaryVO> allNodes = new ArrayList<>();
    allNodes.add(EntitySummaryVO.builder()
            .id(startEntity.getId())
            .name(startEntity.getName())
            .type(startEntity.getType())
            .description(startEntity.getDescription())
            .build());
    allNodes.addAll(nodes);

    return SubgraphVO.builder()
            .nodes(allNodes)
            .edges(edges)
            .nodeCount(allNodes.size())
            .edgeCount(edges.size())
            .build();
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 最短路径
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Finds the shortest path between two entities of the same graph.
 * <p>
 * Both endpoints must exist and be visible to the caller; for non-admin
 * callers every node on the returned path must also pass the permission
 * filter, so a path through an invisible node is treated as no path.
 *
 * @param graphId  owning graph id (must be a UUID)
 * @param sourceId start entity id
 * @param targetId end entity id
 * @param maxDepth maximum search depth, clamped to the configured limit
 * @return the path; pathLength 0 for identical endpoints, -1 when no path exists
 */
public PathVO getShortestPath(String graphId, String sourceId, String targetId, int maxDepth) {
    validateGraphId(graphId);
    String filterUserId = resolveOwnerFilter();
    boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

    // Verify both endpoints exist and the caller may see them.
    GraphEntity sourceEntity = entityRepository.findByIdAndGraphId(sourceId, graphId)
            .orElseThrow(() -> BusinessException.of(
                    KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "源实体不存在"));

    if (filterUserId != null) {
        assertEntityAccess(sourceEntity, filterUserId, excludeConfidential);
    }

    entityRepository.findByIdAndGraphId(targetId, graphId)
            .ifPresentOrElse(
                    targetEntity -> {
                        // Skip the duplicate access check when source == target.
                        if (filterUserId != null && !sourceId.equals(targetId)) {
                            assertEntityAccess(targetEntity, filterUserId, excludeConfidential);
                        }
                    },
                    () -> { throw BusinessException.of(
                            KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "目标实体不存在"); }
            );

    if (sourceId.equals(targetId)) {
        // Identical endpoints: return a trivial single-node path of length 0.
        EntitySummaryVO node = EntitySummaryVO.builder()
                .id(sourceEntity.getId())
                .name(sourceEntity.getName())
                .type(sourceEntity.getType())
                .description(sourceEntity.getDescription())
                .build();
        return PathVO.builder()
                .nodes(List.of(node))
                .edges(List.of())
                .pathLength(0)
                .build();
    }

    int clampedDepth = Math.max(1, Math.min(maxDepth, properties.getMaxDepth()));

    // Same path-level permission filter as getNeighborGraph: every node on the
    // path must be visible to the caller.
    String permFilter = "";
    if (filterUserId != null) {
        StringBuilder pf = new StringBuilder("AND ALL(n IN nodes(path) WHERE ");
        pf.append("(n.type IN ['User', 'Org', 'Field'] OR n.`properties.created_by` = $filterUserId)");
        if (excludeConfidential) {
            pf.append(" AND (toUpper(trim(n.`properties.sensitivity`)) IS NULL OR toUpper(trim(n.`properties.sensitivity`)) <> 'CONFIDENTIAL')");
        }
        pf.append(") ");
        permFilter = pf.toString();
    }

    Map<String, Object> params = new HashMap<>();
    params.put("graphId", graphId);
    params.put("sourceId", sourceId);
    params.put("targetId", targetId);
    if (filterUserId != null) {
        params.put("filterUserId", filterUserId);
    }

    // Neo4j's built-in shortestPath; depth is a clamped int, safe to concatenate.
    String cypher =
            "MATCH (s:Entity {graph_id: $graphId, id: $sourceId}), " +
            "      (t:Entity {graph_id: $graphId, id: $targetId}), " +
            "      path = shortestPath((s)-[:" + REL_TYPE + "*1.." + clampedDepth + "]-(t)) " +
            "WHERE ALL(n IN nodes(path) WHERE n.graph_id = $graphId) " +
            "  AND ALL(r IN relationships(path) WHERE r.graph_id = $graphId) " +
            permFilter +
            "RETURN " +
            "  [n IN nodes(path) | {id: n.id, name: n.name, type: n.type, description: n.description}] AS pathNodes, " +
            "  [r IN relationships(path) | {id: r.id, relation_type: r.relation_type, weight: r.weight, " +
            "   source: startNode(r).id, target: endNode(r).id}] AS pathEdges, " +
            "  length(path) AS pathLength";

    // pathLength -1 is the sentinel for "no path found".
    return neo4jClient.query(cypher)
            .bindAll(params)
            .fetchAs(PathVO.class)
            .mappedBy((ts, record) -> mapPathRecord(record))
            .one()
            .orElse(PathVO.builder()
                    .nodes(List.of())
                    .edges(List.of())
                    .pathLength(-1)
                    .build());
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 所有路径
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Finds all paths between two entities, ordered by ascending path length.
 * <p>
 * Unlike {@link #getShortestPath} this enumerates every variable-length path
 * up to the clamped depth — potentially expensive — so the query runs through
 * {@code queryWithTimeout} rather than the plain client.
 *
 * @param graphId  owning graph id (must be a UUID)
 * @param sourceId start entity id
 * @param targetId end entity id
 * @param maxDepth maximum search depth, clamped to the configured limit
 * @param maxPaths maximum number of paths returned, clamped to the node-query limit
 * @return all visible paths; identical endpoints yield one zero-length path
 */
public AllPathsVO findAllPaths(String graphId, String sourceId, String targetId, int maxDepth, int maxPaths) {
    validateGraphId(graphId);
    String filterUserId = resolveOwnerFilter();
    boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

    // Verify both endpoints exist and the caller may see them.
    GraphEntity sourceEntity = entityRepository.findByIdAndGraphId(sourceId, graphId)
            .orElseThrow(() -> BusinessException.of(
                    KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "源实体不存在"));

    if (filterUserId != null) {
        assertEntityAccess(sourceEntity, filterUserId, excludeConfidential);
    }

    entityRepository.findByIdAndGraphId(targetId, graphId)
            .ifPresentOrElse(
                    targetEntity -> {
                        // Skip the duplicate access check when source == target.
                        if (filterUserId != null && !sourceId.equals(targetId)) {
                            assertEntityAccess(targetEntity, filterUserId, excludeConfidential);
                        }
                    },
                    () -> { throw BusinessException.of(
                            KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "目标实体不存在"); }
            );

    if (sourceId.equals(targetId)) {
        // Identical endpoints: a single trivial path of length 0.
        EntitySummaryVO node = EntitySummaryVO.builder()
                .id(sourceEntity.getId())
                .name(sourceEntity.getName())
                .type(sourceEntity.getType())
                .description(sourceEntity.getDescription())
                .build();
        PathVO singlePath = PathVO.builder()
                .nodes(List.of(node))
                .edges(List.of())
                .pathLength(0)
                .build();
        return AllPathsVO.builder()
                .paths(List.of(singlePath))
                .pathCount(1)
                .build();
    }

    int clampedDepth = Math.max(1, Math.min(maxDepth, properties.getMaxDepth()));
    int clampedMaxPaths = Math.max(1, Math.min(maxPaths, properties.getMaxNodesPerQuery()));

    // Same path-level permission filter as getShortestPath.
    String permFilter = "";
    if (filterUserId != null) {
        StringBuilder pf = new StringBuilder("AND ALL(n IN nodes(path) WHERE ");
        pf.append("(n.type IN ['User', 'Org', 'Field'] OR n.`properties.created_by` = $filterUserId)");
        if (excludeConfidential) {
            pf.append(" AND (toUpper(trim(n.`properties.sensitivity`)) IS NULL OR toUpper(trim(n.`properties.sensitivity`)) <> 'CONFIDENTIAL')");
        }
        pf.append(") ");
        permFilter = pf.toString();
    }

    Map<String, Object> params = new HashMap<>();
    params.put("graphId", graphId);
    params.put("sourceId", sourceId);
    params.put("targetId", targetId);
    params.put("maxPaths", clampedMaxPaths);
    if (filterUserId != null) {
        params.put("filterUserId", filterUserId);
    }

    // Variable-length pattern (no shortestPath), so result count can explode
    // with depth; bounded by LIMIT $maxPaths and the query timeout below.
    String cypher =
            "MATCH (s:Entity {graph_id: $graphId, id: $sourceId}), " +
            "      (t:Entity {graph_id: $graphId, id: $targetId}), " +
            "      path = (s)-[:" + REL_TYPE + "*1.." + clampedDepth + "]-(t) " +
            "WHERE ALL(n IN nodes(path) WHERE n.graph_id = $graphId) " +
            "  AND ALL(r IN relationships(path) WHERE r.graph_id = $graphId) " +
            permFilter +
            "RETURN " +
            "  [n IN nodes(path) | {id: n.id, name: n.name, type: n.type, description: n.description}] AS pathNodes, " +
            "  [r IN relationships(path) | {id: r.id, relation_type: r.relation_type, weight: r.weight, " +
            "   source: startNode(r).id, target: endNode(r).id}] AS pathEdges, " +
            "  length(path) AS pathLength " +
            "ORDER BY length(path) ASC " +
            "LIMIT $maxPaths";

    List<PathVO> paths = queryWithTimeout(cypher, params, record -> mapPathRecord(record));

    return AllPathsVO.builder()
            .paths(paths)
            .pathCount(paths.size())
            .build();
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 子图提取
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 提取指定实体集合之间的关系网络(子图)。
|
||||
*
|
||||
* @param entityIds 实体 ID 集合
|
||||
*/
|
||||
public SubgraphVO getSubgraph(String graphId, List<String> entityIds) {
|
||||
validateGraphId(graphId);
|
||||
String filterUserId = resolveOwnerFilter();
|
||||
boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();
|
||||
|
||||
if (entityIds == null || entityIds.isEmpty()) {
|
||||
return SubgraphVO.builder()
|
||||
.nodes(List.of())
|
||||
.edges(List.of())
|
||||
.nodeCount(0)
|
||||
.edgeCount(0)
|
||||
.build();
|
||||
}
|
||||
|
||||
int maxNodes = properties.getMaxNodesPerQuery();
|
||||
if (entityIds.size() > maxNodes) {
|
||||
throw BusinessException.of(KnowledgeGraphErrorCode.MAX_NODES_EXCEEDED,
|
||||
"实体数量超出限制(最大 " + maxNodes + ")");
|
||||
}
|
||||
|
||||
// 查询存在的实体
|
||||
List<GraphEntity> entities = entityRepository.findByGraphIdAndIdIn(graphId, entityIds);
|
||||
|
||||
// 权限过滤:非管理员只能看到自己创建的业务实体和结构型实体
|
||||
if (filterUserId != null) {
|
||||
entities = entities.stream()
|
||||
.filter(e -> isEntityAccessible(e, filterUserId, excludeConfidential))
|
||||
.toList();
|
||||
}
|
||||
|
||||
List<EntitySummaryVO> nodes = entities.stream()
|
||||
.map(e -> EntitySummaryVO.builder()
|
||||
.id(e.getId())
|
||||
.name(e.getName())
|
||||
.type(e.getType())
|
||||
.description(e.getDescription())
|
||||
.build())
|
||||
.toList();
|
||||
|
||||
if (nodes.isEmpty()) {
|
||||
return SubgraphVO.builder()
|
||||
.nodes(List.of())
|
||||
.edges(List.of())
|
||||
.nodeCount(0)
|
||||
.edgeCount(0)
|
||||
.build();
|
||||
}
|
||||
|
||||
// 查询这些节点之间的边
|
||||
List<String> existingIds = entities.stream().map(GraphEntity::getId).toList();
|
||||
List<EdgeSummaryVO> edges = queryEdgesBetween(graphId, existingIds);
|
||||
|
||||
return SubgraphVO.builder()
|
||||
.nodes(nodes)
|
||||
.edges(edges)
|
||||
.nodeCount(nodes.size())
|
||||
.edgeCount(edges.size())
|
||||
.build();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 子图导出
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Exports the subgraph around a set of seed entities, optionally expanded by
 * a number of hops, with full node properties included.
 *
 * @param graphId   owning graph id (must be a UUID)
 * @param entityIds seed entity ids; null/empty yields an empty export
 * @param depth     expansion depth (0 = seeds only, 1 = include 1-hop
 *                  neighbors, …), clamped to the configured maximum
 * @return export result with full properties on nodes and edges
 */
public SubgraphExportVO exportSubgraph(String graphId, List<String> entityIds, int depth) {
    validateGraphId(graphId);
    String filterUserId = resolveOwnerFilter();
    boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

    if (entityIds == null || entityIds.isEmpty()) {
        return SubgraphExportVO.builder()
                .nodes(List.of())
                .edges(List.of())
                .nodeCount(0)
                .edgeCount(0)
                .build();
    }

    int maxNodes = properties.getMaxNodesPerQuery();
    if (entityIds.size() > maxNodes) {
        throw BusinessException.of(KnowledgeGraphErrorCode.MAX_NODES_EXCEEDED,
                "实体数量超出限制(最大 " + maxNodes + ")");
    }

    int clampedDepth = Math.max(0, Math.min(depth, properties.getMaxDepth()));
    List<GraphEntity> entities;

    if (clampedDepth == 0) {
        // Seeds only — no neighbor expansion.
        entities = entityRepository.findByGraphIdAndIdIn(graphId, entityIds);
    } else {
        // Expand neighbors: first resolve the expanded id set, then load entities.
        // NOTE(review): expandNeighborIds presumably applies the same permission
        // filter during expansion — confirm; the post-filter below is the backstop.
        Set<String> expandedIds = expandNeighborIds(graphId, entityIds, clampedDepth,
                filterUserId, excludeConfidential, maxNodes);
        entities = expandedIds.isEmpty()
                ? List.of()
                : entityRepository.findByGraphIdAndIdIn(graphId, new ArrayList<>(expandedIds));
    }

    // Permission filter: drop entities the caller may not see.
    if (filterUserId != null) {
        entities = entities.stream()
                .filter(e -> isEntityAccessible(e, filterUserId, excludeConfidential))
                .toList();
    }

    if (entities.isEmpty()) {
        return SubgraphExportVO.builder()
                .nodes(List.of())
                .edges(List.of())
                .nodeCount(0)
                .edgeCount(0)
                .build();
    }

    // Export nodes carry the full property map (never null in the output).
    List<ExportNodeVO> nodes = entities.stream()
            .map(e -> ExportNodeVO.builder()
                    .id(e.getId())
                    .name(e.getName())
                    .type(e.getType())
                    .description(e.getDescription())
                    .properties(e.getProperties() != null ? e.getProperties() : Map.of())
                    .build())
            .toList();

    // Every edge connecting the exported nodes, with full edge attributes.
    List<String> nodeIds = entities.stream().map(GraphEntity::getId).toList();
    List<ExportEdgeVO> edges = queryExportEdgesBetween(graphId, nodeIds);

    return SubgraphExportVO.builder()
            .nodes(nodes)
            .edges(edges)
            .nodeCount(nodes.size())
            .edgeCount(edges.size())
            .build();
}
|
||||
|
||||
/**
|
||||
* 将子图导出结果转换为 GraphML XML 格式。
|
||||
*/
|
||||
public String convertToGraphML(SubgraphExportVO exportVO) {
|
||||
StringBuilder xml = new StringBuilder();
|
||||
xml.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
|
||||
xml.append("<graphml xmlns=\"http://graphml.graphstruct.org/graphml\"\n");
|
||||
xml.append(" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n");
|
||||
xml.append(" xsi:schemaLocation=\"http://graphml.graphstruct.org/graphml ");
|
||||
xml.append("http://graphml.graphstruct.org/xmlns/1.0/graphml.xsd\">\n");
|
||||
|
||||
// Key 定义
|
||||
xml.append(" <key id=\"name\" for=\"node\" attr.name=\"name\" attr.type=\"string\"/>\n");
|
||||
xml.append(" <key id=\"type\" for=\"node\" attr.name=\"type\" attr.type=\"string\"/>\n");
|
||||
xml.append(" <key id=\"description\" for=\"node\" attr.name=\"description\" attr.type=\"string\"/>\n");
|
||||
xml.append(" <key id=\"relationType\" for=\"edge\" attr.name=\"relationType\" attr.type=\"string\"/>\n");
|
||||
xml.append(" <key id=\"weight\" for=\"edge\" attr.name=\"weight\" attr.type=\"double\"/>\n");
|
||||
|
||||
xml.append(" <graph id=\"G\" edgedefault=\"directed\">\n");
|
||||
|
||||
// 节点
|
||||
if (exportVO.getNodes() != null) {
|
||||
for (ExportNodeVO node : exportVO.getNodes()) {
|
||||
xml.append(" <node id=\"").append(escapeXml(node.getId())).append("\">\n");
|
||||
appendGraphMLData(xml, "name", node.getName());
|
||||
appendGraphMLData(xml, "type", node.getType());
|
||||
appendGraphMLData(xml, "description", node.getDescription());
|
||||
xml.append(" </node>\n");
|
||||
}
|
||||
}
|
||||
|
||||
// 边
|
||||
if (exportVO.getEdges() != null) {
|
||||
for (ExportEdgeVO edge : exportVO.getEdges()) {
|
||||
xml.append(" <edge id=\"").append(escapeXml(edge.getId()))
|
||||
.append("\" source=\"").append(escapeXml(edge.getSourceEntityId()))
|
||||
.append("\" target=\"").append(escapeXml(edge.getTargetEntityId()))
|
||||
.append("\">\n");
|
||||
appendGraphMLData(xml, "relationType", edge.getRelationType());
|
||||
if (edge.getWeight() != null) {
|
||||
appendGraphMLData(xml, "weight", String.valueOf(edge.getWeight()));
|
||||
}
|
||||
xml.append(" </edge>\n");
|
||||
}
|
||||
}
|
||||
|
||||
xml.append(" </graph>\n");
|
||||
xml.append("</graphml>\n");
|
||||
return xml.toString();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 全文搜索
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
    /**
     * Full-text entity search over name + description using Neo4j's
     * {@code entity_fulltext} index (created by GraphInitializer); results are
     * ordered by Lucene relevance score.
     * <p>
     * Results are cached keyed by (graphId, query, page, size, effective owner
     * filter, confidential-view flag), so users with different visibility never
     * share cache entries.
     *
     * @param graphId graph identifier; must be a well-formed UUID
     * @param query   search keywords; Lucene special characters are escaped
     * @param page    zero-based page index (negative values clamp to 0)
     * @param size    page size (clamped to [1, 200])
     * @return paged search hits; an empty page when the query is blank
     */
    @Cacheable(value = RedisCacheConfig.CACHE_SEARCH,
            key = "T(com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService).cacheKey(#graphId, #query, #page, #size, @resourceAccessService.resolveOwnerFilterUserId(), @resourceAccessService.canViewConfidential())",
            cacheManager = "knowledgeGraphCacheManager")
    public PagedResponse<SearchHitVO> fulltextSearch(String graphId, String query, int page, int size) {
        validateGraphId(graphId);
        String filterUserId = resolveOwnerFilter();
        // Non-admins without the confidential permission must not see CONFIDENTIAL entities.
        boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

        if (query == null || query.isBlank()) {
            return PagedResponse.of(List.of(), 0, 0, 0);
        }

        int safePage = Math.max(0, page);
        int safeSize = Math.max(1, Math.min(size, 200));
        long skip = (long) safePage * safeSize;
        if (skip > MAX_SKIP) {
            throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
        }

        // Escape Lucene special characters to prevent query injection.
        String safeQuery = escapeLuceneQuery(query);
        String permFilter = buildPermissionPredicate("node", filterUserId, excludeConfidential);

        Map<String, Object> searchParams = new HashMap<>();
        searchParams.put("graphId", graphId);
        searchParams.put("query", safeQuery);
        searchParams.put("skip", skip);
        searchParams.put("size", safeSize);
        if (filterUserId != null) {
            // Only bound when the permission predicate references $filterUserId.
            searchParams.put("filterUserId", filterUserId);
        }

        // Page query: index lookup, then graph/permission filters, ordered by score.
        List<SearchHitVO> results = neo4jClient
                .query(
                        "CALL db.index.fulltext.queryNodes('entity_fulltext', $query) YIELD node, score " +
                        "WHERE node.graph_id = $graphId " +
                        permFilter +
                        "RETURN node.id AS id, node.name AS name, node.type AS type, " +
                        "node.description AS description, score " +
                        "ORDER BY score DESC " +
                        "SKIP $skip LIMIT $size"
                )
                .bindAll(searchParams)
                .fetchAs(SearchHitVO.class)
                .mappedBy((ts, record) -> SearchHitVO.builder()
                        .id(record.get("id").asString(null))
                        .name(record.get("name").asString(null))
                        .type(record.get("type").asString(null))
                        .description(record.get("description").asString(null))
                        .score(record.get("score").asDouble())
                        .build())
                .all()
                .stream().toList();

        // Count query: same predicates, no paging parameters.
        Map<String, Object> countParams = new HashMap<>();
        countParams.put("graphId", graphId);
        countParams.put("query", safeQuery);
        if (filterUserId != null) {
            countParams.put("filterUserId", filterUserId);
        }

        long total = neo4jClient
                .query(
                        "CALL db.index.fulltext.queryNodes('entity_fulltext', $query) YIELD node, score " +
                        "WHERE node.graph_id = $graphId " +
                        permFilter +
                        "RETURN count(*) AS total"
                )
                .bindAll(countParams)
                .fetchAs(Long.class)
                .mappedBy((ts, record) -> record.get("total").asLong())
                .one()
                .orElse(0L);

        long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
        return PagedResponse.of(results, safePage, total, totalPages);
    }
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 权限过滤
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
    /**
     * Resolves the owner-filter user ID for the current caller.
     * Administrators get {@code null} (no filtering); regular users get their own userId.
     */
    private String resolveOwnerFilter() {
        return resourceAccessService.resolveOwnerFilterUserId();
    }
|
||||
|
||||
/**
|
||||
* 构建 Cypher 权限过滤条件片段。
|
||||
* <p>
|
||||
* 管理员返回空字符串(不过滤);
|
||||
* 普通用户返回 AND 子句:仅保留结构型实体(User、Org、Field)
|
||||
* 和 {@code created_by} 等于当前用户的业务实体。
|
||||
* 若无保密数据权限,额外过滤 sensitivity=CONFIDENTIAL。
|
||||
*/
|
||||
private static String buildPermissionPredicate(String nodeAlias, String filterUserId, boolean excludeConfidential) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
if (filterUserId != null) {
|
||||
sb.append("AND (").append(nodeAlias).append(".type IN ['User', 'Org', 'Field'] OR ")
|
||||
.append(nodeAlias).append(".`properties.created_by` = $filterUserId) ");
|
||||
}
|
||||
if (excludeConfidential) {
|
||||
sb.append("AND (toUpper(trim(").append(nodeAlias).append(".`properties.sensitivity`)) IS NULL OR ")
|
||||
.append("toUpper(trim(").append(nodeAlias).append(".`properties.sensitivity`)) <> 'CONFIDENTIAL') ");
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
    /**
     * Asserts that a non-admin user may access the given entity.
     * <p>
     * CONFIDENTIAL data requires the canViewConfidential permission; structural
     * entities (User, Org, Field) are visible to all users; business entities
     * must be owned ({@code created_by}) by the current user.
     * <p>
     * NOTE(review): assumes {@code filterUserId} is non-null whenever a
     * non-structural entity reaches the owner check below — a null filter would
     * NPE on the equals call; confirm callers only invoke this for non-admins.
     *
     * @throws BusinessException INSUFFICIENT_PERMISSIONS when access is denied
     */
    private static void assertEntityAccess(GraphEntity entity, String filterUserId, boolean excludeConfidential) {
        // Confidential check (case-insensitive, consistent with data-management).
        if (excludeConfidential) {
            Object sensitivity = entity.getProperties() != null
                    ? entity.getProperties().get("sensitivity") : null;
            if (sensitivity != null && sensitivity.toString().trim().equalsIgnoreCase("CONFIDENTIAL")) {
                throw BusinessException.of(SystemErrorCode.INSUFFICIENT_PERMISSIONS, "无权访问保密数据");
            }
        }
        // Structural entity types are whitelisted for all users.
        if (STRUCTURAL_ENTITY_TYPES.contains(entity.getType())) {
            return;
        }
        // Business entities must match the owner.
        Object createdBy = entity.getProperties() != null
                ? entity.getProperties().get("created_by") : null;
        if (createdBy == null || !filterUserId.equals(createdBy.toString())) {
            throw BusinessException.of(SystemErrorCode.INSUFFICIENT_PERMISSIONS, "无权访问该实体");
        }
    }
|
||||
|
||||
/**
|
||||
* 判断实体是否对指定用户可访问。
|
||||
* 保密数据需要 canViewConfidential 权限;
|
||||
* 结构型实体(User、Org、Field)对所有用户可见;
|
||||
* 业务实体必须匹配 created_by。
|
||||
*/
|
||||
private static boolean isEntityAccessible(GraphEntity entity, String filterUserId, boolean excludeConfidential) {
|
||||
// 保密数据检查(大小写不敏感,与 data-management 一致)
|
||||
if (excludeConfidential) {
|
||||
Object sensitivity = entity.getProperties() != null
|
||||
? entity.getProperties().get("sensitivity") : null;
|
||||
if (sensitivity != null && sensitivity.toString().trim().equalsIgnoreCase("CONFIDENTIAL")) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// 结构型实体按类型白名单放行
|
||||
if (STRUCTURAL_ENTITY_TYPES.contains(entity.getType())) {
|
||||
return true;
|
||||
}
|
||||
// 业务实体必须匹配 owner
|
||||
Object createdBy = entity.getProperties() != null
|
||||
? entity.getProperties().get("created_by") : null;
|
||||
return createdBy != null && filterUserId.equals(createdBy.toString());
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 内部方法
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 查询指定节点集合之间的所有边。
|
||||
*/
|
||||
private List<EdgeSummaryVO> queryEdgesBetween(String graphId, List<String> nodeIds) {
|
||||
if (nodeIds.size() < 2) {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (s:Entity {graph_id: $graphId})-[r:" + REL_TYPE + " {graph_id: $graphId}]->(t:Entity {graph_id: $graphId}) " +
|
||||
"WHERE s.id IN $nodeIds AND t.id IN $nodeIds " +
|
||||
"RETURN r.id AS id, s.id AS sourceEntityId, t.id AS targetEntityId, " +
|
||||
"r.relation_type AS relationType, r.weight AS weight"
|
||||
)
|
||||
.bindAll(Map.of("graphId", graphId, "nodeIds", nodeIds))
|
||||
.fetchAs(EdgeSummaryVO.class)
|
||||
.mappedBy((ts, record) -> EdgeSummaryVO.builder()
|
||||
.id(record.get("id").asString(null))
|
||||
.sourceEntityId(record.get("sourceEntityId").asString(null))
|
||||
.targetEntityId(record.get("targetEntityId").asString(null))
|
||||
.relationType(record.get("relationType").asString(null))
|
||||
.weight(record.get("weight").isNull() ? null : record.get("weight").asDouble())
|
||||
.build())
|
||||
.all()
|
||||
.stream().toList();
|
||||
}
|
||||
|
||||
    /**
     * Maps one Cypher result record (with {@code pathNodes}, {@code pathEdges},
     * {@code pathLength} columns) to a {@link PathVO}.
     * <p>
     * Null/absent columns yield empty node/edge lists; pathLength defaults to 0.
     */
    private PathVO mapPathRecord(MapAccessor record) {
        // Path nodes: each element is a map-like Value with id/name/type/description.
        List<EntitySummaryVO> nodes = new ArrayList<>();
        Value pathNodes = record.get("pathNodes");
        if (pathNodes != null && !pathNodes.isNull()) {
            for (Value nodeVal : pathNodes.asList(v -> v)) {
                nodes.add(EntitySummaryVO.builder()
                        .id(getStringOrNull(nodeVal, "id"))
                        .name(getStringOrNull(nodeVal, "name"))
                        .type(getStringOrNull(nodeVal, "type"))
                        .description(getStringOrNull(nodeVal, "description"))
                        .build());
            }
        }

        // Path edges: each element carries id/source/target/relation_type/weight.
        List<EdgeSummaryVO> edges = new ArrayList<>();
        Value pathEdges = record.get("pathEdges");
        if (pathEdges != null && !pathEdges.isNull()) {
            for (Value edgeVal : pathEdges.asList(v -> v)) {
                edges.add(EdgeSummaryVO.builder()
                        .id(getStringOrNull(edgeVal, "id"))
                        .sourceEntityId(getStringOrNull(edgeVal, "source"))
                        .targetEntityId(getStringOrNull(edgeVal, "target"))
                        .relationType(getStringOrNull(edgeVal, "relation_type"))
                        .weight(getDoubleOrNull(edgeVal, "weight"))
                        .build());
            }
        }

        int pathLength = record.get("pathLength").asInt(0);

        return PathVO.builder()
                .nodes(nodes)
                .edges(edges)
                .pathLength(pathLength)
                .build();
    }
|
||||
|
||||
/**
|
||||
* 转义 Lucene 查询中的特殊字符,防止查询注入。
|
||||
*/
|
||||
private static String escapeLuceneQuery(String query) {
|
||||
// Lucene 特殊字符: + - && || ! ( ) { } [ ] ^ " ~ * ? : \ /
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (char c : query.toCharArray()) {
|
||||
if ("+-&|!(){}[]^\"~*?:\\/".indexOf(c) >= 0) {
|
||||
sb.append('\\');
|
||||
}
|
||||
sb.append(c);
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private static String getStringOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asString();
|
||||
}
|
||||
|
||||
private static Double getDoubleOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asDouble();
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询指定节点集合之间的所有边(导出用,包含完整属性)。
|
||||
*/
|
||||
private List<ExportEdgeVO> queryExportEdgesBetween(String graphId, List<String> nodeIds) {
|
||||
if (nodeIds.size() < 2) {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (s:Entity {graph_id: $graphId})-[r:" + REL_TYPE + " {graph_id: $graphId}]->(t:Entity {graph_id: $graphId}) " +
|
||||
"WHERE s.id IN $nodeIds AND t.id IN $nodeIds " +
|
||||
"RETURN r.id AS id, s.id AS sourceEntityId, t.id AS targetEntityId, " +
|
||||
"r.relation_type AS relationType, r.weight AS weight, " +
|
||||
"r.confidence AS confidence, r.source_id AS sourceId"
|
||||
)
|
||||
.bindAll(Map.of("graphId", graphId, "nodeIds", nodeIds))
|
||||
.fetchAs(ExportEdgeVO.class)
|
||||
.mappedBy((ts, record) -> ExportEdgeVO.builder()
|
||||
.id(record.get("id").asString(null))
|
||||
.sourceEntityId(record.get("sourceEntityId").asString(null))
|
||||
.targetEntityId(record.get("targetEntityId").asString(null))
|
||||
.relationType(record.get("relationType").asString(null))
|
||||
.weight(record.get("weight").isNull() ? null : record.get("weight").asDouble())
|
||||
.confidence(record.get("confidence").isNull() ? null : record.get("confidence").asDouble())
|
||||
.sourceId(record.get("sourceId").asString(null))
|
||||
.build())
|
||||
.all()
|
||||
.stream().toList();
|
||||
}
|
||||
|
||||
    /**
     * Expands N-hop neighbors from a set of seed entities and returns all node
     * IDs (seeds included).
     * <p>
     * Runs with a transaction timeout (via {@link #queryWithTimeout}) to guard
     * against combinatorial explosion on deep expansion. The result size is
     * strictly capped at {@code maxNodes}, seeds included, because the LIMIT is
     * applied after seeds and neighbors are merged in Cypher.
     *
     * @param graphId             graph identifier (already validated by callers)
     * @param seedIds             seed entity IDs to expand from
     * @param depth               maximum hop count (inlined into the pattern; must be a trusted int)
     * @param filterUserId        owner filter; null disables permission filtering
     * @param excludeConfidential whether CONFIDENTIAL entities are excluded
     * @param maxNodes            hard cap on the number of returned IDs
     * @return insertion-ordered set of node IDs
     */
    private Set<String> expandNeighborIds(String graphId, List<String> seedIds, int depth,
                                          String filterUserId, boolean excludeConfidential, int maxNodes) {
        // Permission predicate applied to EVERY node on the path, so an
        // inaccessible intermediate node also blocks the nodes behind it.
        String permFilter = "";
        if (filterUserId != null) {
            StringBuilder pf = new StringBuilder("AND ALL(n IN nodes(p) WHERE ");
            pf.append("(n.type IN ['User', 'Org', 'Field'] OR n.`properties.created_by` = $filterUserId)");
            if (excludeConfidential) {
                pf.append(" AND (toUpper(trim(n.`properties.sensitivity`)) IS NULL OR toUpper(trim(n.`properties.sensitivity`)) <> 'CONFIDENTIAL')");
            }
            pf.append(") ");
            permFilter = pf.toString();
        }

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("seedIds", seedIds);
        params.put("maxNodes", maxNodes);
        if (filterUserId != null) {
            params.put("filterUserId", filterUserId);
        }

        // Seed nodes are included under the Cypher LIMIT so the total never
        // exceeds maxNodes. OPTIONAL MATCH keeps seeds with zero neighbors.
        String cypher =
                "MATCH (seed:Entity {graph_id: $graphId}) " +
                "WHERE seed.id IN $seedIds " +
                "WITH collect(DISTINCT seed) AS seeds " +
                "UNWIND seeds AS s " +
                "OPTIONAL MATCH p = (s)-[:" + REL_TYPE + "*1.." + depth + "]-(neighbor:Entity) " +
                "WHERE ALL(n IN nodes(p) WHERE n.graph_id = $graphId) " +
                "  AND ALL(r IN relationships(p) WHERE r.graph_id = $graphId) " +
                permFilter +
                "WITH seeds + collect(DISTINCT neighbor) AS allNodes " +
                "UNWIND allNodes AS node " +
                "WITH DISTINCT node " +
                "WHERE node IS NOT NULL " +
                "RETURN node.id AS id " +
                "LIMIT $maxNodes";

        List<String> ids = queryWithTimeout(cypher, params,
                record -> record.get("id").asString(null));

        // LinkedHashSet keeps the server-side ordering while deduplicating.
        return new LinkedHashSet<>(ids);
    }
|
||||
|
||||
private static void appendGraphMLData(StringBuilder xml, String key, String value) {
|
||||
if (value != null) {
|
||||
xml.append(" <data key=\"").append(key).append("\">")
|
||||
.append(escapeXml(value))
|
||||
.append("</data>\n");
|
||||
}
|
||||
}
|
||||
|
||||
private static String escapeXml(String text) {
|
||||
if (text == null) {
|
||||
return "";
|
||||
}
|
||||
return text.replace("&", "&")
|
||||
.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace("\"", """)
|
||||
.replace("'", "'");
|
||||
}
|
||||
|
||||
private void validateGraphId(String graphId) {
|
||||
if (graphId == null || !UUID_PATTERN.matcher(graphId).matches()) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "graphId 格式无效");
|
||||
}
|
||||
}
|
||||
|
||||
    /**
     * Runs a Cypher query directly through the Neo4j driver with a
     * transaction-level timeout.
     * <p>
     * Intended for expensive queries (path enumeration etc.) that can suffer
     * combinatorial explosion; on timeout the Neo4j server terminates the
     * transaction, preventing resource exhaustion.
     *
     * @param cypher Cypher statement to execute
     * @param params statement parameters
     * @param mapper maps each driver record to a result item
     * @return mapped result list
     * @throws BusinessException QUERY_TIMEOUT when the transaction timed out;
     *                           any other failure is rethrown as-is
     */
    private <T> List<T> queryWithTimeout(String cypher, Map<String, Object> params,
                                         Function<Record, T> mapper) {
        int timeoutSeconds = properties.getQueryTimeoutSeconds();
        TransactionConfig txConfig = TransactionConfig.builder()
                .withTimeout(Duration.ofSeconds(timeoutSeconds))
                .build();
        try (Session session = neo4jDriver.session()) {
            // executeRead retries on transient errors; the timeout applies per transaction.
            return session.executeRead(tx -> {
                var result = tx.run(cypher, params);
                List<T> items = new ArrayList<>();
                while (result.hasNext()) {
                    items.add(mapper.apply(result.next()));
                }
                return items;
            }, txConfig);
        } catch (Exception e) {
            if (isTransactionTimeout(e)) {
                // Log only a bounded prefix of the statement to keep log lines short.
                log.warn("图查询超时({}秒): {}", timeoutSeconds, cypher.substring(0, Math.min(cypher.length(), 120)));
                throw BusinessException.of(KnowledgeGraphErrorCode.QUERY_TIMEOUT,
                        "查询超时(" + timeoutSeconds + "秒),请缩小搜索范围或减少深度");
            }
            throw e;
        }
    }
|
||||
|
||||
/**
|
||||
* 判断异常是否为 Neo4j 事务超时。
|
||||
*/
|
||||
private static boolean isTransactionTimeout(Exception e) {
|
||||
// Neo4j 事务超时时抛出的异常链中通常包含 "terminated" 或 "timeout"
|
||||
Throwable current = e;
|
||||
while (current != null) {
|
||||
String msg = current.getMessage();
|
||||
if (msg != null) {
|
||||
String lower = msg.toLowerCase(Locale.ROOT);
|
||||
if (lower.contains("transaction has been terminated") || lower.contains("timed out")) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
current = current.getCause();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,251 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.domain.model.RelationDetail;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphRelationRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.CreateRelationRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.RelationVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.UpdateRelationRequest;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* 知识图谱关系业务服务。
|
||||
* <p>
|
||||
* <b>信任边界说明</b>:本服务仅通过内网被 API Gateway / Java 后端调用,
|
||||
* 网关层已完成用户身份认证与权限校验,服务层不再重复鉴权,
|
||||
* 仅校验 graphId 格式(防 Cypher 注入)与数据完整性约束。
|
||||
*/
|
||||
@Service
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class GraphRelationService {
|
||||
|
||||
/** 分页偏移量上限,防止深翻页导致 Neo4j 性能退化。 */
|
||||
private static final long MAX_SKIP = 100_000L;
|
||||
|
||||
/** 合法的关系查询方向。 */
|
||||
private static final Set<String> VALID_DIRECTIONS = Set.of("all", "in", "out");
|
||||
|
||||
private static final Pattern UUID_PATTERN = Pattern.compile(
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
|
||||
);
|
||||
|
||||
private final GraphRelationRepository relationRepository;
|
||||
private final GraphEntityRepository entityRepository;
|
||||
private final GraphCacheService cacheService;
|
||||
|
||||
@Transactional
|
||||
public RelationVO createRelation(String graphId, CreateRelationRequest request) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
// 校验源实体存在
|
||||
entityRepository.findByIdAndGraphId(request.getSourceEntityId(), graphId)
|
||||
.orElseThrow(() -> BusinessException.of(
|
||||
KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "源实体不存在"));
|
||||
|
||||
// 校验目标实体存在
|
||||
entityRepository.findByIdAndGraphId(request.getTargetEntityId(), graphId)
|
||||
.orElseThrow(() -> BusinessException.of(
|
||||
KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "目标实体不存在"));
|
||||
|
||||
RelationDetail detail = relationRepository.create(
|
||||
graphId,
|
||||
request.getSourceEntityId(),
|
||||
request.getTargetEntityId(),
|
||||
request.getRelationType(),
|
||||
request.getProperties(),
|
||||
request.getWeight(),
|
||||
request.getSourceId(),
|
||||
request.getConfidence()
|
||||
).orElseThrow(() -> BusinessException.of(
|
||||
KnowledgeGraphErrorCode.INVALID_RELATION, "关系创建失败"));
|
||||
|
||||
log.info("Relation created: id={}, graphId={}, type={}, source={} -> target={}",
|
||||
detail.getId(), graphId, request.getRelationType(),
|
||||
request.getSourceEntityId(), request.getTargetEntityId());
|
||||
cacheService.evictEntityCaches(graphId, request.getSourceEntityId());
|
||||
return toVO(detail);
|
||||
}
|
||||
|
||||
public RelationVO getRelation(String graphId, String relationId) {
|
||||
validateGraphId(graphId);
|
||||
RelationDetail detail = relationRepository.findByIdAndGraphId(relationId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND));
|
||||
return toVO(detail);
|
||||
}
|
||||
|
||||
public PagedResponse<RelationVO> listRelations(String graphId, String type, int page, int size) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
int safePage = Math.max(0, page);
|
||||
int safeSize = Math.max(1, Math.min(size, 200));
|
||||
long skip = (long) safePage * safeSize;
|
||||
if (skip > MAX_SKIP) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
|
||||
}
|
||||
|
||||
List<RelationDetail> details = relationRepository.findByGraphId(graphId, type, skip, safeSize);
|
||||
long total = relationRepository.countByGraphId(graphId, type);
|
||||
long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
|
||||
|
||||
List<RelationVO> content = details.stream().map(GraphRelationService::toVO).toList();
|
||||
return PagedResponse.of(content, safePage, total, totalPages);
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询实体的关系列表。
|
||||
*
|
||||
* @param direction "all"、"in" 或 "out"
|
||||
*/
|
||||
public PagedResponse<RelationVO> listEntityRelations(String graphId, String entityId,
|
||||
String direction, String type,
|
||||
int page, int size) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
// 校验实体存在
|
||||
entityRepository.findByIdAndGraphId(entityId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.ENTITY_NOT_FOUND));
|
||||
|
||||
int safePage = Math.max(0, page);
|
||||
int safeSize = Math.max(1, Math.min(size, 200));
|
||||
long skip = (long) safePage * safeSize;
|
||||
if (skip > MAX_SKIP) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
|
||||
}
|
||||
|
||||
String safeDirection = (direction != null) ? direction : "all";
|
||||
if (!VALID_DIRECTIONS.contains(safeDirection)) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER,
|
||||
"direction 参数无效,允许值:all, in, out");
|
||||
}
|
||||
|
||||
List<RelationDetail> details;
|
||||
switch (safeDirection) {
|
||||
case "in":
|
||||
details = relationRepository.findInboundByEntityId(graphId, entityId, type, skip, safeSize);
|
||||
break;
|
||||
case "out":
|
||||
details = relationRepository.findOutboundByEntityId(graphId, entityId, type, skip, safeSize);
|
||||
break;
|
||||
default:
|
||||
details = relationRepository.findByEntityId(graphId, entityId, type, skip, safeSize);
|
||||
break;
|
||||
}
|
||||
|
||||
long total = relationRepository.countByEntityId(graphId, entityId, type, safeDirection);
|
||||
long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
|
||||
|
||||
List<RelationVO> content = details.stream().map(GraphRelationService::toVO).toList();
|
||||
return PagedResponse.of(content, safePage, total, totalPages);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public RelationVO updateRelation(String graphId, String relationId, UpdateRelationRequest request) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
// 确认关系存在
|
||||
relationRepository.findByIdAndGraphId(relationId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND));
|
||||
|
||||
RelationDetail detail = relationRepository.update(
|
||||
relationId, graphId,
|
||||
request.getRelationType(),
|
||||
request.getProperties(),
|
||||
request.getWeight(),
|
||||
request.getConfidence()
|
||||
).orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND));
|
||||
|
||||
log.info("Relation updated: id={}, graphId={}", relationId, graphId);
|
||||
cacheService.evictEntityCaches(graphId, detail.getSourceEntityId());
|
||||
return toVO(detail);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void deleteRelation(String graphId, String relationId) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
// 确认关系存在并保留关系两端实体 ID,用于精准缓存失效
|
||||
RelationDetail detail = relationRepository.findByIdAndGraphId(relationId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND));
|
||||
|
||||
long deleted = relationRepository.deleteByIdAndGraphId(relationId, graphId);
|
||||
if (deleted <= 0) {
|
||||
throw BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND);
|
||||
}
|
||||
log.info("Relation deleted: id={}, graphId={}", relationId, graphId);
|
||||
cacheService.evictEntityCaches(graphId, detail.getSourceEntityId());
|
||||
if (detail.getTargetEntityId() != null
|
||||
&& !detail.getTargetEntityId().equals(detail.getSourceEntityId())) {
|
||||
cacheService.evictEntityCaches(graphId, detail.getTargetEntityId());
|
||||
}
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Map<String, Object> batchDeleteRelations(String graphId, List<String> relationIds) {
|
||||
validateGraphId(graphId);
|
||||
int deleted = 0;
|
||||
List<String> failedIds = new ArrayList<>();
|
||||
for (String relationId : relationIds) {
|
||||
try {
|
||||
deleteRelation(graphId, relationId);
|
||||
deleted++;
|
||||
} catch (Exception e) {
|
||||
log.warn("Batch delete: failed to delete relation {}: {}", relationId, e.getMessage());
|
||||
failedIds.add(relationId);
|
||||
}
|
||||
}
|
||||
Map<String, Object> result = Map.of(
|
||||
"deleted", deleted,
|
||||
"total", relationIds.size(),
|
||||
"failedIds", failedIds
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 领域对象 → 视图对象 转换
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
private static RelationVO toVO(RelationDetail detail) {
|
||||
return RelationVO.builder()
|
||||
.id(detail.getId())
|
||||
.sourceEntityId(detail.getSourceEntityId())
|
||||
.sourceEntityName(detail.getSourceEntityName())
|
||||
.sourceEntityType(detail.getSourceEntityType())
|
||||
.targetEntityId(detail.getTargetEntityId())
|
||||
.targetEntityName(detail.getTargetEntityName())
|
||||
.targetEntityType(detail.getTargetEntityType())
|
||||
.relationType(detail.getRelationType())
|
||||
.properties(detail.getProperties())
|
||||
.weight(detail.getWeight())
|
||||
.confidence(detail.getConfidence())
|
||||
.sourceId(detail.getSourceId())
|
||||
.graphId(detail.getGraphId())
|
||||
.createdAt(detail.getCreatedAt())
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* 校验 graphId 格式(UUID)。
|
||||
* 防止恶意构造的 graphId 注入 Cypher 查询。
|
||||
*/
|
||||
private void validateGraphId(String graphId) {
|
||||
if (graphId == null || !UUID_PATTERN.matcher(graphId).matches()) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "graphId 格式无效");
|
||||
}
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,95 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 索引健康检查服务。
|
||||
* <p>
|
||||
* 提供 Neo4j 索引状态查询,用于运维监控和启动验证。
|
||||
*/
|
||||
@Service
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class IndexHealthService {
|
||||
|
||||
private final Neo4jClient neo4jClient;
|
||||
|
||||
/**
|
||||
* 获取所有索引状态信息。
|
||||
*
|
||||
* @return 索引名称到状态的映射列表,每项包含 name, state, type, entityType, labelsOrTypes, properties
|
||||
*/
|
||||
public List<Map<String, Object>> getIndexStatus() {
|
||||
return neo4jClient
|
||||
.query("SHOW INDEXES YIELD name, state, type, entityType, labelsOrTypes, properties " +
|
||||
"RETURN name, state, type, entityType, labelsOrTypes, properties " +
|
||||
"ORDER BY name")
|
||||
.fetchAs(Map.class)
|
||||
.mappedBy((ts, record) -> {
|
||||
Map<String, Object> info = new java.util.LinkedHashMap<>();
|
||||
info.put("name", record.get("name").asString(null));
|
||||
info.put("state", record.get("state").asString(null));
|
||||
info.put("type", record.get("type").asString(null));
|
||||
info.put("entityType", record.get("entityType").asString(null));
|
||||
var labelsOrTypes = record.get("labelsOrTypes");
|
||||
info.put("labelsOrTypes", labelsOrTypes.isNull() ? List.of() : labelsOrTypes.asList(v -> v.asString(null)));
|
||||
var properties = record.get("properties");
|
||||
info.put("properties", properties.isNull() ? List.of() : properties.asList(v -> v.asString(null)));
|
||||
return info;
|
||||
})
|
||||
.all()
|
||||
.stream()
|
||||
.map(m -> (Map<String, Object>) m)
|
||||
.toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查是否存在非 ONLINE 状态的索引。
|
||||
*
|
||||
* @return true 表示所有索引健康(ONLINE 状态)
|
||||
*/
|
||||
public boolean allIndexesOnline() {
|
||||
List<Map<String, Object>> indexes = getIndexStatus();
|
||||
if (indexes.isEmpty()) {
|
||||
log.warn("No indexes found in Neo4j database");
|
||||
return false;
|
||||
}
|
||||
for (Map<String, Object> idx : indexes) {
|
||||
String state = (String) idx.get("state");
|
||||
if (!"ONLINE".equals(state)) {
|
||||
log.warn("Index '{}' is in state '{}' (expected ONLINE)", idx.get("name"), state);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取数据库统计信息(节点数、关系数)。
|
||||
*
|
||||
* @return 包含 nodeCount 和 relationshipCount 的映射
|
||||
*/
|
||||
public Map<String, Long> getDatabaseStats() {
|
||||
Long nodeCount = neo4jClient
|
||||
.query("MATCH (n:Entity) RETURN count(n) AS cnt")
|
||||
.fetchAs(Long.class)
|
||||
.mappedBy((ts, record) -> record.get("cnt").asLong())
|
||||
.one()
|
||||
.orElse(0L);
|
||||
|
||||
Long relCount = neo4jClient
|
||||
.query("MATCH ()-[r:RELATED_TO]->() RETURN count(r) AS cnt")
|
||||
.fetchAs(Long.class)
|
||||
.mappedBy((ts, record) -> record.get("cnt").asLong())
|
||||
.one()
|
||||
.orElse(0L);
|
||||
|
||||
return Map.of("nodeCount", nodeCount, "relationshipCount", relCount);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
/**
 * Knowledge-graph edit review record.
 * <p>
 * Stored in Neo4j as an {@code EditReview} node; captures create/update/delete
 * requests against entities and relations together with their review state.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EditReview {

    private String id;

    /** Owning graph ID. */
    private String graphId;

    /** Operation type: CREATE_ENTITY, UPDATE_ENTITY, DELETE_ENTITY, BATCH_DELETE_ENTITY, CREATE_RELATION, UPDATE_RELATION, DELETE_RELATION, BATCH_DELETE_RELATION. */
    private String operationType;

    /** Target entity ID (non-null for entity operations). */
    private String entityId;

    /** Target relation ID (non-null for relation operations). */
    private String relationId;

    /** Change payload (JSON-serialized request body). */
    private String payload;

    /** Review status: PENDING, APPROVED, REJECTED. */
    @Builder.Default
    private String status = "PENDING";

    /** Submitter user ID. */
    private String submittedBy;

    /** Reviewer user ID. */
    private String reviewedBy;

    /** Review comment. */
    private String reviewComment;

    private LocalDateTime createdAt;

    private LocalDateTime reviewedAt;
}
|
||||
@@ -0,0 +1,81 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import org.springframework.data.neo4j.core.schema.DynamicLabels;
|
||||
import org.springframework.data.neo4j.core.schema.GeneratedValue;
|
||||
import org.springframework.data.neo4j.core.schema.Id;
|
||||
import org.springframework.data.neo4j.core.schema.Node;
|
||||
import org.springframework.data.neo4j.core.schema.Property;
|
||||
import org.springframework.data.neo4j.core.support.UUIDStringGenerator;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Knowledge-graph entity node.
 * <p>
 * Each entity is stored as a Neo4j node. The concrete kind (Person,
 * Organization, Concept, ...) is carried by the {@code type} property, and
 * flexible extension attributes live in the {@code properties} map.
 */
@Node("Entity")
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class GraphEntity {

    @Id
    @GeneratedValue(UUIDStringGenerator.class)
    private String id;

    @Property("name")
    private String name;

    @Property("type")
    private String type;

    @Property("description")
    private String description;

    // Extra Neo4j labels beyond the fixed "Entity" label.
    @DynamicLabels
    @Builder.Default
    private List<String> labels = new ArrayList<>();

    @Property("aliases")
    @Builder.Default
    private List<String> aliases = new ArrayList<>();

    @Property("properties")
    @Builder.Default
    private Map<String, Object> properties = new HashMap<>();

    /** ID of the source dataset / knowledge base. */
    @Property("source_id")
    private String sourceId;

    /** Source type: ANNOTATION, KNOWLEDGE_BASE, IMPORT, MANUAL. */
    @Property("source_type")
    private String sourceType;

    /** Owning graph ID (matches {@code t_dm_knowledge_graphs.id} in MySQL). */
    @Property("graph_id")
    private String graphId;

    /** Confidence score of automatic extraction. */
    @Property("confidence")
    @Builder.Default
    private Double confidence = 1.0;

    @Property("created_at")
    private LocalDateTime createdAt;

    @Property("updated_at")
    private LocalDateTime updatedAt;
}
|
||||
@@ -0,0 +1,61 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import org.springframework.data.neo4j.core.schema.GeneratedValue;
|
||||
import org.springframework.data.neo4j.core.schema.Id;
|
||||
import org.springframework.data.neo4j.core.schema.Property;
|
||||
import org.springframework.data.neo4j.core.schema.RelationshipProperties;
|
||||
import org.springframework.data.neo4j.core.schema.TargetNode;
|
||||
import org.springframework.data.neo4j.core.support.UUIDStringGenerator;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Knowledge-graph relation (edge).
 * <p>
 * Modeled with Spring Data Neo4j's {@code @RelationshipProperties} so the
 * relationship can carry attributes. The semantic relation kind is expressed
 * via {@code relationType} (e.g. belongs_to, located_in).
 */
@RelationshipProperties
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class GraphRelation {

    @Id
    @GeneratedValue(UUIDStringGenerator.class)
    private String id;

    // Only the target end is held here; the source node owns the relation.
    @TargetNode
    private GraphEntity target;

    @Property("relation_type")
    private String relationType;

    @Property("properties")
    @Builder.Default
    private Map<String, Object> properties = new HashMap<>();

    @Property("weight")
    @Builder.Default
    private Double weight = 1.0;

    @Property("source_id")
    private String sourceId;

    @Property("confidence")
    @Builder.Default
    private Double confidence = 1.0;

    @Property("graph_id")
    private String graphId;

    @Property("created_at")
    private LocalDateTime createdAt;
}
|
||||
@@ -0,0 +1,54 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * A relation together with summaries of both endpoint entities, used as the
 * repository-layer query result type.
 * <p>
 * {@link GraphRelation} uses {@code @RelationshipProperties} and only holds a
 * reference to the target node, so it cannot fully express the
 * "source node + relation + target node" structure returned by Cypher
 * queries; this domain object is used as the repository return type instead.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class RelationDetail {

    private String id;

    private String sourceEntityId;

    private String sourceEntityName;

    private String sourceEntityType;

    private String targetEntityId;

    private String targetEntityName;

    private String targetEntityType;

    private String relationType;

    @Builder.Default
    private Map<String, Object> properties = new HashMap<>();

    private Double weight;

    private Double confidence;

    /** ID of the source dataset / knowledge base. */
    private String sourceId;

    private String graphId;

    private LocalDateTime createdAt;
}
|
||||
@@ -0,0 +1,194 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import org.springframework.data.annotation.Transient;
|
||||
import org.springframework.data.neo4j.core.schema.GeneratedValue;
|
||||
import org.springframework.data.neo4j.core.schema.Id;
|
||||
import org.springframework.data.neo4j.core.schema.Node;
|
||||
import org.springframework.data.neo4j.core.schema.Property;
|
||||
import org.springframework.data.neo4j.core.support.UUIDStringGenerator;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Metadata for one synchronization run: overall status plus aggregate
 * statistics.
 * <p>
 * Also persisted to Neo4j as a {@code SyncHistory} node so past runs can be
 * queried for auditing and troubleshooting.
 */
@Node("SyncHistory")
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SyncMetadata {

    public static final String STATUS_SUCCESS = "SUCCESS";
    public static final String STATUS_FAILED = "FAILED";
    public static final String STATUS_PARTIAL = "PARTIAL";

    public static final String TYPE_FULL = "FULL";
    public static final String TYPE_INCREMENTAL = "INCREMENTAL";
    public static final String TYPE_DATASETS = "DATASETS";
    public static final String TYPE_FIELDS = "FIELDS";
    public static final String TYPE_USERS = "USERS";
    public static final String TYPE_ORGS = "ORGS";
    public static final String TYPE_WORKFLOWS = "WORKFLOWS";
    public static final String TYPE_JOBS = "JOBS";
    public static final String TYPE_LABEL_TASKS = "LABEL_TASKS";
    public static final String TYPE_KNOWLEDGE_SETS = "KNOWLEDGE_SETS";

    @Id
    @GeneratedValue(UUIDStringGenerator.class)
    private String id;

    @Property("sync_id")
    private String syncId;

    @Property("graph_id")
    private String graphId;

    /** Sync type: FULL / DATASETS / WORKFLOWS etc. */
    @Property("sync_type")
    private String syncType;

    /** Sync status: SUCCESS / FAILED / PARTIAL. */
    @Property("status")
    private String status;

    @Property("started_at")
    private LocalDateTime startedAt;

    @Property("completed_at")
    private LocalDateTime completedAt;

    @Property("duration_millis")
    private long durationMillis;

    @Property("total_created")
    @Builder.Default
    private int totalCreated = 0;

    @Property("total_updated")
    @Builder.Default
    private int totalUpdated = 0;

    @Property("total_skipped")
    @Builder.Default
    private int totalSkipped = 0;

    @Property("total_failed")
    @Builder.Default
    private int totalFailed = 0;

    @Property("total_purged")
    @Builder.Default
    private int totalPurged = 0;

    /** Start of the time window for incremental sync. */
    @Property("updated_from")
    private LocalDateTime updatedFrom;

    /** End of the time window for incremental sync. */
    @Property("updated_to")
    private LocalDateTime updatedTo;

    /** Error message when the sync failed. */
    @Property("error_message")
    private String errorMessage;

    /** Per-step summaries, e.g. "Dataset(+5/~2/-0/purged:1)". */
    @Property("step_summaries")
    @Builder.Default
    private List<String> stepSummaries = new ArrayList<>();

    /** Detailed per-step results (not persisted to Neo4j; carried on return only). */
    @Transient
    private List<SyncResult> results;

    // Total number of processed items (created + updated + skipped + failed).
    public int totalEntities() {
        return totalCreated + totalUpdated + totalSkipped + totalFailed;
    }

    /**
     * Builds metadata by aggregating a list of {@link SyncResult}s.
     * Status is PARTIAL when any step reported failures, otherwise SUCCESS.
     */
    public static SyncMetadata fromResults(String syncId, String graphId, String syncType,
                                           LocalDateTime startedAt, List<SyncResult> results) {
        LocalDateTime completedAt = LocalDateTime.now();
        long duration = Duration.between(startedAt, completedAt).toMillis();

        int created = 0, updated = 0, skipped = 0, failed = 0, purged = 0;
        List<String> summaries = new ArrayList<>();
        boolean hasFailures = false;

        for (SyncResult r : results) {
            created += r.getCreated();
            updated += r.getUpdated();
            skipped += r.getSkipped();
            failed += r.getFailed();
            purged += r.getPurged();
            if (r.getFailed() > 0) {
                hasFailures = true;
            }
            summaries.add(formatStepSummary(r));
        }

        String status = hasFailures ? STATUS_PARTIAL : STATUS_SUCCESS;

        return SyncMetadata.builder()
                .syncId(syncId)
                .graphId(graphId)
                .syncType(syncType)
                .status(status)
                .startedAt(startedAt)
                .completedAt(completedAt)
                .durationMillis(duration)
                .totalCreated(created)
                .totalUpdated(updated)
                .totalSkipped(skipped)
                .totalFailed(failed)
                .totalPurged(purged)
                .stepSummaries(summaries)
                .results(results)
                .build();
    }

    /**
     * Builds metadata for a run that failed outright (no per-step results).
     */
    public static SyncMetadata failed(String syncId, String graphId, String syncType,
                                      LocalDateTime startedAt, String errorMessage) {
        LocalDateTime completedAt = LocalDateTime.now();
        long duration = Duration.between(startedAt, completedAt).toMillis();

        return SyncMetadata.builder()
                .syncId(syncId)
                .graphId(graphId)
                .syncType(syncType)
                .status(STATUS_FAILED)
                .startedAt(startedAt)
                .completedAt(completedAt)
                .durationMillis(duration)
                .errorMessage(errorMessage)
                .build();
    }

    // Formats one step as "<type>(+created/~updated/-failed[/purged:n])".
    // NOTE(review): the "-" segment prints getFailed(), not skipped — confirm
    // the intended summary format matches the field-level example above.
    private static String formatStepSummary(SyncResult r) {
        StringBuilder sb = new StringBuilder();
        sb.append(r.getSyncType())
                .append("(+").append(r.getCreated())
                .append("/~").append(r.getUpdated())
                .append("/-").append(r.getFailed());
        if (r.getPurged() > 0) {
            sb.append("/purged:").append(r.getPurged());
        }
        sb.append(")");
        return sb.toString();
    }
}
|
||||
@@ -0,0 +1,81 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Statistics for one synchronization step.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SyncResult {

    /** Tracking identifier of this sync run. */
    private String syncId;

    /** Entity/relation type this step synchronized. */
    private String syncType;

    @Builder.Default
    private int created = 0;

    @Builder.Default
    private int updated = 0;

    @Builder.Default
    private int skipped = 0;

    @Builder.Default
    private int failed = 0;

    /** Stale entities purged during full-sync reconciliation. */
    @Builder.Default
    private int purged = 0;

    /** Marks a placeholder step (feature not yet implemented; no real data). */
    @Builder.Default
    private boolean placeholder = false;

    @Builder.Default
    private List<String> errors = new ArrayList<>();

    private LocalDateTime startedAt;

    private LocalDateTime completedAt;

    // Total number of processed items in this step.
    public int total() {
        return created + updated + skipped + failed;
    }

    // Wall-clock duration; 0 when either timestamp is missing.
    public long durationMillis() {
        if (startedAt == null || completedAt == null) {
            return 0;
        }
        return java.time.Duration.between(startedAt, completedAt).toMillis();
    }

    public void incrementCreated() {
        created++;
    }

    public void incrementUpdated() {
        updated++;
    }

    public void incrementSkipped() {
        skipped++;
    }

    // Records an error message and counts it as a failure.
    public void addError(String error) {
        failed++;
        errors.add(error);
    }
}
|
||||
@@ -0,0 +1,193 @@
|
||||
package com.datamate.knowledgegraph.domain.repository;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.EditReview;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.neo4j.driver.Value;
|
||||
import org.neo4j.driver.types.MapAccessor;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* 编辑审核仓储。
|
||||
* <p>
|
||||
* 使用 {@code Neo4jClient} 管理 {@code EditReview} 节点。
|
||||
*/
|
||||
@Repository
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class EditReviewRepository {
|
||||
|
||||
private final Neo4jClient neo4jClient;
|
||||
|
||||
public EditReview save(EditReview review) {
|
||||
if (review.getId() == null) {
|
||||
review.setId(UUID.randomUUID().toString());
|
||||
}
|
||||
if (review.getCreatedAt() == null) {
|
||||
review.setCreatedAt(LocalDateTime.now());
|
||||
}
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("id", review.getId());
|
||||
params.put("graphId", review.getGraphId());
|
||||
params.put("operationType", review.getOperationType());
|
||||
params.put("entityId", review.getEntityId() != null ? review.getEntityId() : "");
|
||||
params.put("relationId", review.getRelationId() != null ? review.getRelationId() : "");
|
||||
params.put("payload", review.getPayload() != null ? review.getPayload() : "");
|
||||
params.put("status", review.getStatus());
|
||||
params.put("submittedBy", review.getSubmittedBy() != null ? review.getSubmittedBy() : "");
|
||||
params.put("reviewedBy", review.getReviewedBy() != null ? review.getReviewedBy() : "");
|
||||
params.put("reviewComment", review.getReviewComment() != null ? review.getReviewComment() : "");
|
||||
params.put("createdAt", review.getCreatedAt());
|
||||
|
||||
// reviewed_at 为 null 时(PENDING 状态)不写入 SET,避免 null 参数导致属性缺失
|
||||
String reviewedAtSet = "";
|
||||
if (review.getReviewedAt() != null) {
|
||||
reviewedAtSet = ", r.reviewed_at = $reviewedAt";
|
||||
params.put("reviewedAt", review.getReviewedAt());
|
||||
}
|
||||
|
||||
neo4jClient
|
||||
.query(
|
||||
"MERGE (r:EditReview {id: $id}) " +
|
||||
"SET r.graph_id = $graphId, " +
|
||||
" r.operation_type = $operationType, " +
|
||||
" r.entity_id = $entityId, " +
|
||||
" r.relation_id = $relationId, " +
|
||||
" r.payload = $payload, " +
|
||||
" r.status = $status, " +
|
||||
" r.submitted_by = $submittedBy, " +
|
||||
" r.reviewed_by = $reviewedBy, " +
|
||||
" r.review_comment = $reviewComment, " +
|
||||
" r.created_at = $createdAt" +
|
||||
reviewedAtSet + " " +
|
||||
"RETURN r"
|
||||
)
|
||||
.bindAll(params)
|
||||
.run();
|
||||
|
||||
return review;
|
||||
}
|
||||
|
||||
public Optional<EditReview> findById(String reviewId, String graphId) {
|
||||
return neo4jClient
|
||||
.query("MATCH (r:EditReview {id: $id, graph_id: $graphId}) RETURN r")
|
||||
.bindAll(Map.of("id", reviewId, "graphId", graphId))
|
||||
.fetchAs(EditReview.class)
|
||||
.mappedBy((typeSystem, record) -> mapRecord(record))
|
||||
.one();
|
||||
}
|
||||
|
||||
public List<EditReview> findPendingByGraphId(String graphId, long skip, int size) {
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (r:EditReview {graph_id: $graphId, status: 'PENDING'}) " +
|
||||
"RETURN r ORDER BY r.created_at DESC SKIP $skip LIMIT $size"
|
||||
)
|
||||
.bindAll(Map.of("graphId", graphId, "skip", skip, "size", size))
|
||||
.fetchAs(EditReview.class)
|
||||
.mappedBy((typeSystem, record) -> mapRecord(record))
|
||||
.all()
|
||||
.stream().toList();
|
||||
}
|
||||
|
||||
public long countPendingByGraphId(String graphId) {
|
||||
return neo4jClient
|
||||
.query("MATCH (r:EditReview {graph_id: $graphId, status: 'PENDING'}) RETURN count(r) AS cnt")
|
||||
.bindAll(Map.of("graphId", graphId))
|
||||
.fetchAs(Long.class)
|
||||
.mappedBy((typeSystem, record) -> record.get("cnt").asLong())
|
||||
.one()
|
||||
.orElse(0L);
|
||||
}
|
||||
|
||||
public List<EditReview> findByGraphId(String graphId, String status, long skip, int size) {
|
||||
String statusFilter = (status != null && !status.isBlank())
|
||||
? "AND r.status = $status "
|
||||
: "";
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("graphId", graphId);
|
||||
params.put("status", status != null ? status : "");
|
||||
params.put("skip", skip);
|
||||
params.put("size", size);
|
||||
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (r:EditReview {graph_id: $graphId}) " +
|
||||
"WHERE true " + statusFilter +
|
||||
"RETURN r ORDER BY r.created_at DESC SKIP $skip LIMIT $size"
|
||||
)
|
||||
.bindAll(params)
|
||||
.fetchAs(EditReview.class)
|
||||
.mappedBy((typeSystem, record) -> mapRecord(record))
|
||||
.all()
|
||||
.stream().toList();
|
||||
}
|
||||
|
||||
public long countByGraphId(String graphId, String status) {
|
||||
String statusFilter = (status != null && !status.isBlank())
|
||||
? "AND r.status = $status "
|
||||
: "";
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("graphId", graphId);
|
||||
params.put("status", status != null ? status : "");
|
||||
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (r:EditReview {graph_id: $graphId}) " +
|
||||
"WHERE true " + statusFilter +
|
||||
"RETURN count(r) AS cnt"
|
||||
)
|
||||
.bindAll(params)
|
||||
.fetchAs(Long.class)
|
||||
.mappedBy((typeSystem, record) -> record.get("cnt").asLong())
|
||||
.one()
|
||||
.orElse(0L);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 内部映射
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
private EditReview mapRecord(MapAccessor record) {
|
||||
Value r = record.get("r");
|
||||
|
||||
return EditReview.builder()
|
||||
.id(getStringOrNull(r, "id"))
|
||||
.graphId(getStringOrNull(r, "graph_id"))
|
||||
.operationType(getStringOrNull(r, "operation_type"))
|
||||
.entityId(getStringOrEmpty(r, "entity_id"))
|
||||
.relationId(getStringOrEmpty(r, "relation_id"))
|
||||
.payload(getStringOrNull(r, "payload"))
|
||||
.status(getStringOrNull(r, "status"))
|
||||
.submittedBy(getStringOrEmpty(r, "submitted_by"))
|
||||
.reviewedBy(getStringOrEmpty(r, "reviewed_by"))
|
||||
.reviewComment(getStringOrEmpty(r, "review_comment"))
|
||||
.createdAt(getLocalDateTimeOrNull(r, "created_at"))
|
||||
.reviewedAt(getLocalDateTimeOrNull(r, "reviewed_at"))
|
||||
.build();
|
||||
}
|
||||
|
||||
private static String getStringOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asString();
|
||||
}
|
||||
|
||||
private static String getStringOrEmpty(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
if (v == null || v.isNull()) return null;
|
||||
String s = v.asString();
|
||||
return s.isEmpty() ? null : s;
|
||||
}
|
||||
|
||||
private static LocalDateTime getLocalDateTimeOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asLocalDateTime();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,103 @@
|
||||
package com.datamate.knowledgegraph.domain.repository;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import org.springframework.data.neo4j.repository.Neo4jRepository;
|
||||
import org.springframework.data.neo4j.repository.query.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
@Repository
|
||||
public interface GraphEntityRepository extends Neo4jRepository<GraphEntity, String> {
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.id = $entityId RETURN e")
|
||||
Optional<GraphEntity> findByIdAndGraphId(
|
||||
@Param("entityId") String entityId,
|
||||
@Param("graphId") String graphId);
|
||||
|
||||
List<GraphEntity> findByGraphId(String graphId);
|
||||
|
||||
List<GraphEntity> findByGraphIdAndType(String graphId, String type);
|
||||
|
||||
List<GraphEntity> findByGraphIdAndNameContaining(String graphId, String name);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) " +
|
||||
"WHERE e.name = $name AND e.type = $type " +
|
||||
"RETURN e")
|
||||
List<GraphEntity> findByGraphIdAndNameAndType(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("name") String name,
|
||||
@Param("type") String type);
|
||||
|
||||
@Query("MATCH p = (e:Entity {graph_id: $graphId, id: $entityId})-[*1..$depth]-(neighbor:Entity) " +
|
||||
"WHERE e <> neighbor " +
|
||||
" AND ALL(n IN nodes(p) WHERE n.graph_id = $graphId) " +
|
||||
" AND ALL(r IN relationships(p) WHERE r.graph_id = $graphId) " +
|
||||
"RETURN DISTINCT neighbor LIMIT $limit")
|
||||
List<GraphEntity> findNeighbors(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("entityId") String entityId,
|
||||
@Param("depth") int depth,
|
||||
@Param("limit") int limit);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) RETURN count(e)")
|
||||
long countByGraphId(@Param("graphId") String graphId);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) " +
|
||||
"WHERE e.source_id = $sourceId AND e.type = $type " +
|
||||
"RETURN e")
|
||||
Optional<GraphEntity> findByGraphIdAndSourceIdAndType(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("sourceId") String sourceId,
|
||||
@Param("type") String type);
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 分页查询
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) " +
|
||||
"RETURN e ORDER BY e.created_at DESC SKIP $skip LIMIT $limit")
|
||||
List<GraphEntity> findByGraphIdPaged(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("skip") long skip,
|
||||
@Param("limit") int limit);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.type = $type " +
|
||||
"RETURN e ORDER BY e.created_at DESC SKIP $skip LIMIT $limit")
|
||||
List<GraphEntity> findByGraphIdAndTypePaged(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("type") String type,
|
||||
@Param("skip") long skip,
|
||||
@Param("limit") int limit);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.type = $type " +
|
||||
"RETURN count(e)")
|
||||
long countByGraphIdAndType(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("type") String type);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.name CONTAINS $name " +
|
||||
"RETURN e ORDER BY e.created_at DESC SKIP $skip LIMIT $limit")
|
||||
List<GraphEntity> findByGraphIdAndNameContainingPaged(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("name") String name,
|
||||
@Param("skip") long skip,
|
||||
@Param("limit") int limit);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.name CONTAINS $name " +
|
||||
"RETURN count(e)")
|
||||
long countByGraphIdAndNameContaining(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("name") String name);
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 图查询
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.id IN $entityIds RETURN e")
|
||||
List<GraphEntity> findByGraphIdAndIdIn(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("entityIds") List<String> entityIds);
|
||||
}
|
||||
@@ -0,0 +1,499 @@
|
||||
package com.datamate.knowledgegraph.domain.repository;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.RelationDetail;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.neo4j.driver.Value;
|
||||
import org.neo4j.driver.types.MapAccessor;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* 知识图谱关系仓储。
|
||||
* <p>
|
||||
* 由于 {@code GraphRelation} 使用 {@code @RelationshipProperties},
|
||||
* 无法通过 {@code Neo4jRepository} 直接管理,
|
||||
* 因此使用 {@code Neo4jClient} 执行 Cypher 查询实现 CRUD。
|
||||
* <p>
|
||||
* Neo4j 中使用统一的 {@code RELATED_TO} 关系类型,
|
||||
* 语义类型通过 {@code relation_type} 属性区分。
|
||||
* 扩展属性(properties)序列化为 JSON 字符串存储在 {@code properties_json} 属性中。
|
||||
*/
|
||||
@Repository
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class GraphRelationRepository {
|
||||
|
||||
private static final String REL_TYPE = "RELATED_TO";
|
||||
private static final TypeReference<Map<String, Object>> MAP_TYPE = new TypeReference<>() {};
|
||||
private static final ObjectMapper MAPPER = new ObjectMapper();
|
||||
|
||||
/** 查询返回列(源节点 + 关系 + 目标节点)。 */
|
||||
private static final String RETURN_COLUMNS =
|
||||
"RETURN r, " +
|
||||
"s.id AS sourceEntityId, s.name AS sourceEntityName, s.type AS sourceEntityType, " +
|
||||
"t.id AS targetEntityId, t.name AS targetEntityName, t.type AS targetEntityType";
|
||||
|
||||
private final Neo4jClient neo4jClient;
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 查询
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
    /**
     * Finds a single relation by id within a graph, returning it together
     * with summaries of both endpoint entities.
     */
    public Optional<RelationDetail> findByIdAndGraphId(String relationId, String graphId) {
        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {id: $relationId, graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        RETURN_COLUMNS
                )
                .bindAll(Map.of("graphId", graphId, "relationId", relationId))
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .one();
    }
|
||||
|
||||
    /**
     * Lists a graph's relations, newest first, optionally filtered by
     * {@code relation_type}, with offset pagination.
     */
    public List<RelationDetail> findByGraphId(String graphId, String type, long skip, int size) {
        String typeFilter = (type != null && !type.isBlank())
                ? "AND r.relation_type = $type "
                : "";

        // $type is always bound (as "" when unused) so bindAll never fails.
        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("type", type != null ? type : "");
        params.put("skip", skip);
        params.put("size", size);

        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        "WHERE true " + typeFilter +
                        RETURN_COLUMNS + " " +
                        "ORDER BY r.created_at DESC " +
                        "SKIP $skip LIMIT $size"
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }
|
||||
|
||||
    /**
     * Lists all relations of an entity (outgoing + incoming).
     * <p>
     * Uses {@code CALL { ... UNION ALL ... }} to anchor two MATCHes on the
     * entity (one per direction) instead of scanning the whole graph and
     * filtering afterwards. {@code WITH DISTINCT} deduplicates self-loop
     * relations, which would otherwise appear in both branches.
     */
    public List<RelationDetail> findByEntityId(String graphId, String entityId, String type,
                                               long skip, int size) {
        String typeFilter = (type != null && !type.isBlank())
                ? "WHERE r.relation_type = $type "
                : "";

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("entityId", entityId);
        params.put("type", type != null ? type : "");
        params.put("skip", skip);
        params.put("size", size);

        return neo4jClient
                .query(
                        "CALL { " +
                        "MATCH (s:Entity {graph_id: $graphId, id: $entityId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        typeFilter +
                        "RETURN r, s, t " +
                        "UNION ALL " +
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId, id: $entityId}) " +
                        typeFilter +
                        "RETURN r, s, t " +
                        "} " +
                        "WITH DISTINCT r, s, t " +
                        "ORDER BY r.created_at DESC SKIP $skip LIMIT $size " +
                        RETURN_COLUMNS
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }
|
||||
|
||||
    /**
     * Lists an entity's inbound relations (the entity is the target node),
     * newest first, optionally filtered by {@code relation_type}.
     */
    public List<RelationDetail> findInboundByEntityId(String graphId, String entityId, String type,
                                                      long skip, int size) {
        String typeFilter = (type != null && !type.isBlank())
                ? "AND r.relation_type = $type "
                : "";

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("entityId", entityId);
        params.put("type", type != null ? type : "");
        params.put("skip", skip);
        params.put("size", size);

        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId, id: $entityId}) " +
                        "WHERE true " + typeFilter +
                        RETURN_COLUMNS + " " +
                        "ORDER BY r.created_at DESC " +
                        "SKIP $skip LIMIT $size"
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }
|
||||
|
||||
    /**
     * Lists an entity's outbound relations (the entity is the source node),
     * newest first, optionally filtered by {@code relation_type}.
     */
    public List<RelationDetail> findOutboundByEntityId(String graphId, String entityId, String type,
                                                       long skip, int size) {
        String typeFilter = (type != null && !type.isBlank())
                ? "AND r.relation_type = $type "
                : "";

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("entityId", entityId);
        params.put("type", type != null ? type : "");
        params.put("skip", skip);
        params.put("size", size);

        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId, id: $entityId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        "WHERE true " + typeFilter +
                        RETURN_COLUMNS + " " +
                        "ORDER BY r.created_at DESC " +
                        "SKIP $skip LIMIT $size"
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }
|
||||
|
||||
/**
 * Counts the relations attached to an entity.
 * <p>
 * Every direction anchors the MATCH pattern on the entity to avoid a full
 * graph scan. The "all" direction uses {@code CALL{UNION}} so self-loop
 * relations (matched by both arms) are deduplicated automatically.
 *
 * @param graphId   graph the relations belong to
 * @param entityId  id of the anchored entity
 * @param type      optional relation-type filter; ignored when null/blank
 * @param direction "all", "in" or "out"
 * @return number of matching relations
 */
public long countByEntityId(String graphId, String entityId, String type, String direction) {
    String typeFilter = (type != null && !type.isBlank())
            ? "WHERE r.relation_type = $type "
            : "";

    Map<String, Object> params = new HashMap<>();
    params.put("graphId", graphId);
    params.put("entityId", entityId);
    // $type is bound unconditionally; the WHERE clause only references it
    // when a filter was requested.
    params.put("type", type != null ? type : "");

    String cypher;
    switch (direction) {
        case "in":
            // Entity is the target node.
            cypher = "MATCH (:Entity {graph_id: $graphId})" +
                    "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                    "(:Entity {graph_id: $graphId, id: $entityId}) " +
                    typeFilter +
                    "RETURN count(r) AS cnt";
            break;
        case "out":
            // Entity is the source node.
            cypher = "MATCH (:Entity {graph_id: $graphId, id: $entityId})" +
                    "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                    "(:Entity {graph_id: $graphId}) " +
                    typeFilter +
                    "RETURN count(r) AS cnt";
            break;
        default:
            // "all": UNION (not UNION ALL) removes duplicate rows, so a
            // self-loop relation returned by both arms is counted once.
            cypher = "CALL { " +
                    "MATCH (:Entity {graph_id: $graphId, id: $entityId})" +
                    "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                    "(:Entity {graph_id: $graphId}) " +
                    typeFilter +
                    "RETURN r " +
                    "UNION " +
                    "MATCH (:Entity {graph_id: $graphId})" +
                    "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                    "(:Entity {graph_id: $graphId, id: $entityId}) " +
                    typeFilter +
                    "RETURN r " +
                    "} " +
                    "RETURN count(r) AS cnt";
            break;
    }

    return neo4jClient
            .query(cypher)
            .bindAll(params)
            .fetchAs(Long.class)
            .mappedBy((typeSystem, record) -> record.get("cnt").asLong())
            .one()
            .orElse(0L);
}
|
||||
|
||||
public List<RelationDetail> findBySourceAndTarget(String graphId, String sourceEntityId, String targetEntityId) {
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (s:Entity {graph_id: $graphId, id: $sourceEntityId})" +
|
||||
"-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
|
||||
"(t:Entity {graph_id: $graphId, id: $targetEntityId}) " +
|
||||
RETURN_COLUMNS
|
||||
)
|
||||
.bindAll(Map.of(
|
||||
"graphId", graphId,
|
||||
"sourceEntityId", sourceEntityId,
|
||||
"targetEntityId", targetEntityId
|
||||
))
|
||||
.fetchAs(RelationDetail.class)
|
||||
.mappedBy((typeSystem, record) -> mapRecord(record))
|
||||
.all()
|
||||
.stream().toList();
|
||||
}
|
||||
|
||||
public List<RelationDetail> findByType(String graphId, String type) {
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (s:Entity {graph_id: $graphId})" +
|
||||
"-[r:" + REL_TYPE + " {graph_id: $graphId, relation_type: $type}]->" +
|
||||
"(t:Entity {graph_id: $graphId}) " +
|
||||
RETURN_COLUMNS
|
||||
)
|
||||
.bindAll(Map.of("graphId", graphId, "type", type))
|
||||
.fetchAs(RelationDetail.class)
|
||||
.mappedBy((typeSystem, record) -> mapRecord(record))
|
||||
.all()
|
||||
.stream().toList();
|
||||
}
|
||||
|
||||
public long countByGraphId(String graphId, String type) {
|
||||
String typeFilter = (type != null && !type.isBlank())
|
||||
? "AND r.relation_type = $type "
|
||||
: "";
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("graphId", graphId);
|
||||
params.put("type", type != null ? type : "");
|
||||
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (:Entity {graph_id: $graphId})" +
|
||||
"-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
|
||||
"(:Entity {graph_id: $graphId}) " +
|
||||
"WHERE true " + typeFilter +
|
||||
"RETURN count(r) AS cnt"
|
||||
)
|
||||
.bindAll(params)
|
||||
.fetchAs(Long.class)
|
||||
.mappedBy((typeSystem, record) -> record.get("cnt").asLong())
|
||||
.one()
|
||||
.orElse(0L);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 写入
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
public Optional<RelationDetail> create(String graphId, String sourceEntityId, String targetEntityId,
|
||||
String relationType, Map<String, Object> properties,
|
||||
Double weight, String sourceId, Double confidence) {
|
||||
String id = UUID.randomUUID().toString();
|
||||
LocalDateTime now = LocalDateTime.now();
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("graphId", graphId);
|
||||
params.put("sourceEntityId", sourceEntityId);
|
||||
params.put("targetEntityId", targetEntityId);
|
||||
params.put("id", id);
|
||||
params.put("relationType", relationType);
|
||||
params.put("weight", weight != null ? weight : 1.0);
|
||||
params.put("confidence", confidence != null ? confidence : 1.0);
|
||||
params.put("sourceId", sourceId != null ? sourceId : "");
|
||||
params.put("propertiesJson", serializeProperties(properties));
|
||||
params.put("createdAt", now);
|
||||
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (s:Entity {graph_id: $graphId, id: $sourceEntityId}) " +
|
||||
"MATCH (t:Entity {graph_id: $graphId, id: $targetEntityId}) " +
|
||||
"MERGE (s)-[r:" + REL_TYPE + " {graph_id: $graphId, relation_type: $relationType}]->(t) " +
|
||||
"ON CREATE SET r.id = $id, r.weight = $weight, r.confidence = $confidence, " +
|
||||
" r.source_id = $sourceId, r.properties_json = $propertiesJson, r.created_at = $createdAt " +
|
||||
"ON MATCH SET r.weight = CASE WHEN $weight IS NOT NULL THEN $weight ELSE r.weight END, " +
|
||||
" r.confidence = CASE WHEN $confidence IS NOT NULL THEN $confidence ELSE r.confidence END, " +
|
||||
" r.source_id = CASE WHEN $sourceId <> '' THEN $sourceId ELSE r.source_id END, " +
|
||||
" r.properties_json = CASE WHEN $propertiesJson <> '{}' THEN $propertiesJson ELSE r.properties_json END " +
|
||||
RETURN_COLUMNS
|
||||
)
|
||||
.bindAll(params)
|
||||
.fetchAs(RelationDetail.class)
|
||||
.mappedBy((typeSystem, record) -> mapRecord(record))
|
||||
.one();
|
||||
}
|
||||
|
||||
/**
 * Partially updates a relation; only non-null arguments are written.
 * <p>
 * SET clauses (and their bound parameters) are assembled dynamically, so
 * fields that were not supplied keep their stored values. When every
 * optional argument is null, nothing is written and the current state is
 * returned via {@code findByIdAndGraphId}.
 *
 * @return the updated relation, or empty when it does not exist
 */
public Optional<RelationDetail> update(String relationId, String graphId,
                                       String relationType, Map<String, Object> properties,
                                       Double weight, Double confidence) {
    Map<String, Object> params = new HashMap<>();
    params.put("graphId", graphId);
    params.put("relationId", relationId);

    // One SET clause + one bound parameter per provided field.
    StringBuilder setClauses = new StringBuilder();
    if (relationType != null) {
        setClauses.append("SET r.relation_type = $relationType ");
        params.put("relationType", relationType);
    }
    if (properties != null) {
        setClauses.append("SET r.properties_json = $propertiesJson ");
        params.put("propertiesJson", serializeProperties(properties));
    }
    if (weight != null) {
        setClauses.append("SET r.weight = $weight ");
        params.put("weight", weight);
    }
    if (confidence != null) {
        setClauses.append("SET r.confidence = $confidence ");
        params.put("confidence", confidence);
    }

    // Nothing to update: skip the write and return the current state.
    if (setClauses.isEmpty()) {
        return findByIdAndGraphId(relationId, graphId);
    }

    return neo4jClient
            .query(
                    "MATCH (s:Entity {graph_id: $graphId})" +
                    "-[r:" + REL_TYPE + " {id: $relationId, graph_id: $graphId}]->" +
                    "(t:Entity {graph_id: $graphId}) " +
                    setClauses +
                    RETURN_COLUMNS
            )
            .bindAll(params)
            .fetchAs(RelationDetail.class)
            .mappedBy((typeSystem, record) -> mapRecord(record))
            .one();
}
|
||||
|
||||
/**
 * Deletes the given relation, returning the number actually removed (0 or 1).
 */
public long deleteByIdAndGraphId(String relationId, String graphId) {
    // When MATCH finds nothing, the pipeline is empty and count(*) still
    // aggregates to 0; when one row matches, DELETE keeps the row in the
    // pipeline and count(*) returns 1.
    return neo4jClient
            .query(
                    "MATCH (:Entity {graph_id: $graphId})" +
                    "-[r:" + REL_TYPE + " {id: $relationId, graph_id: $graphId}]->" +
                    "(:Entity {graph_id: $graphId}) " +
                    "DELETE r " +
                    "RETURN count(*) AS deleted"
            )
            .bindAll(Map.of("graphId", graphId, "relationId", relationId))
            .fetchAs(Long.class)
            .mappedBy((typeSystem, record) -> record.get("deleted").asLong())
            .one()
            .orElse(0L);
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 内部映射
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Maps a single query record to a {@link RelationDetail}.
 * <p>
 * Relation-level fields are read from the {@code r} value; the
 * source/target entity columns are read directly from the record as
 * projected by {@code RETURN_COLUMNS}. Absent fields map to null.
 */
private RelationDetail mapRecord(MapAccessor record) {
    Value r = record.get("r");

    return RelationDetail.builder()
            .id(getStringOrNull(r, "id"))
            .sourceEntityId(record.get("sourceEntityId").asString(null))
            .sourceEntityName(record.get("sourceEntityName").asString(null))
            .sourceEntityType(record.get("sourceEntityType").asString(null))
            .targetEntityId(record.get("targetEntityId").asString(null))
            .targetEntityName(record.get("targetEntityName").asString(null))
            .targetEntityType(record.get("targetEntityType").asString(null))
            .relationType(getStringOrNull(r, "relation_type"))
            // properties are stored as a JSON string on the relation
            .properties(deserializeProperties(getStringOrNull(r, "properties_json")))
            .weight(getDoubleOrNull(r, "weight"))
            .confidence(getDoubleOrNull(r, "confidence"))
            .sourceId(getStringOrNull(r, "source_id"))
            .graphId(getStringOrNull(r, "graph_id"))
            .createdAt(getLocalDateTimeOrNull(r, "created_at"))
            .build();
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Properties JSON 序列化
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Serializes a properties map to its JSON string representation.
 *
 * @param properties map to serialize; may be null
 * @return the JSON text, or {@code "{}"} for a null/empty map
 * @throws IllegalArgumentException when serialization fails
 */
private static String serializeProperties(Map<String, Object> properties) {
    if (properties == null || properties.isEmpty()) {
        return "{}";
    }
    try {
        return MAPPER.writeValueAsString(properties);
    } catch (JsonProcessingException e) {
        // Serialization failures must not be swallowed silently; propagate
        // so the data problem surfaces.
        throw new IllegalArgumentException("Failed to serialize relation properties to JSON", e);
    }
}
|
||||
|
||||
/**
 * Deserializes a properties JSON string into a mutable map.
 * <p>
 * Malformed JSON is logged (payload truncated to 100 chars) and an empty
 * map is returned, so one corrupt record does not fail a whole query.
 */
private static Map<String, Object> deserializeProperties(String json) {
    if (json == null || json.isBlank()) {
        return new HashMap<>();
    }
    try {
        return MAPPER.readValue(json, MAP_TYPE);
    } catch (JsonProcessingException e) {
        log.warn("Failed to deserialize properties_json (returning empty map): json='{}', error={}",
                json.length() > 100 ? json.substring(0, 100) + "..." : json, e.getMessage());
        return new HashMap<>();
    }
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 字段读取辅助
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
private static String getStringOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asString();
|
||||
}
|
||||
|
||||
private static Double getDoubleOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asDouble();
|
||||
}
|
||||
|
||||
private static LocalDateTime getLocalDateTimeOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asLocalDateTime();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,43 @@
|
||||
package com.datamate.knowledgegraph.domain.repository;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.SyncMetadata;
|
||||
import org.springframework.data.neo4j.repository.Neo4jRepository;
|
||||
import org.springframework.data.neo4j.repository.query.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
 * Read-side repository for sync-run history nodes ({@code SyncHistory}).
 */
@Repository
public interface SyncHistoryRepository extends Neo4jRepository<SyncMetadata, String> {

    /** Latest sync runs of a graph, newest first, capped at {@code limit}. */
    @Query("MATCH (h:SyncHistory {graph_id: $graphId}) " +
           "RETURN h ORDER BY h.started_at DESC LIMIT $limit")
    List<SyncMetadata> findByGraphId(
            @Param("graphId") String graphId,
            @Param("limit") int limit);

    /** Latest sync runs of a graph with the given status, newest first. */
    @Query("MATCH (h:SyncHistory {graph_id: $graphId, status: $status}) " +
           "RETURN h ORDER BY h.started_at DESC LIMIT $limit")
    List<SyncMetadata> findByGraphIdAndStatus(
            @Param("graphId") String graphId,
            @Param("status") String status,
            @Param("limit") int limit);

    /** Looks up one sync run by its sync id within a graph. */
    @Query("MATCH (h:SyncHistory {graph_id: $graphId, sync_id: $syncId}) RETURN h")
    Optional<SyncMetadata> findByGraphIdAndSyncId(
            @Param("graphId") String graphId,
            @Param("syncId") String syncId);

    /** Sync runs started within {@code [from, to]}, newest first, paged via skip/limit. */
    @Query("MATCH (h:SyncHistory {graph_id: $graphId}) " +
           "WHERE h.started_at >= $from AND h.started_at <= $to " +
           "RETURN h ORDER BY h.started_at DESC SKIP $skip LIMIT $limit")
    List<SyncMetadata> findByGraphIdAndTimeRange(
            @Param("graphId") String graphId,
            @Param("from") LocalDateTime from,
            @Param("to") LocalDateTime to,
            @Param("skip") long skip,
            @Param("limit") int limit);
}
|
||||
@@ -0,0 +1,149 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.cache;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.cache.Cache;
|
||||
import org.springframework.cache.CacheManager;
|
||||
import org.springframework.data.redis.core.StringRedisTemplate;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
|
||||
/**
 * Graph cache management service.
 * <p>
 * Provides cache-invalidation operations that the service layer calls after
 * write operations (create/update/delete) to keep the cache eventually
 * consistent with the database.
 * <p>
 * When a {@link StringRedisTemplate} is available, invalidation is done at
 * fine granularity by graphId key prefix, avoiding cross-graph cache
 * flushes; otherwise it degrades to clearing the whole cache region.
 */
@Service
@Slf4j
public class GraphCacheService {

    // Matches the "datamate:" cache-name prefix configured by the cache
    // manager (see RedisCacheConfig.prefixCacheNameWith), so that the
    // key-pattern eviction below targets the right keys.
    private static final String KEY_PREFIX = "datamate:";

    private final CacheManager cacheManager;
    // Optional collaborator: only injected when a StringRedisTemplate bean exists.
    private StringRedisTemplate redisTemplate;

    public GraphCacheService(@Qualifier("knowledgeGraphCacheManager") CacheManager cacheManager) {
        this.cacheManager = cacheManager;
    }

    @Autowired(required = false)
    public void setRedisTemplate(StringRedisTemplate redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    /**
     * Invalidates every cache region for the given graph.
     * <p>
     * Called after sync and batch operations to keep the cache consistent.
     * When Redis is available, only this graphId's entries are evicted,
     * leaving other graphs untouched.
     */
    public void evictGraphCaches(String graphId) {
        log.debug("Evicting all caches for graph_id={}", graphId);
        evictByGraphPrefix(RedisCacheConfig.CACHE_ENTITIES, graphId);
        evictByGraphPrefix(RedisCacheConfig.CACHE_QUERIES, graphId);
        evictByGraphPrefix(RedisCacheConfig.CACHE_SEARCH, graphId);
    }

    /**
     * Invalidates caches related to a single entity.
     * <p>
     * Called after single-entity create/update/delete. Precisely evicts the
     * entity's own entry and the "list" entry, and clears the graph's query
     * caches (neighbour relationships may have changed).
     */
    public void evictEntityCaches(String graphId, String entityId) {
        log.debug("Evicting entity caches: graph_id={}, entity_id={}", graphId, entityId);
        // Precisely evict the concrete entity entry and the list entry.
        evictKey(RedisCacheConfig.CACHE_ENTITIES, cacheKey(graphId, entityId));
        evictKey(RedisCacheConfig.CACHE_ENTITIES, cacheKey(graphId, "list"));
        // Evict query caches by graphId prefix.
        evictByGraphPrefix(RedisCacheConfig.CACHE_QUERIES, graphId);
    }

    /**
     * Invalidates the search caches of a graph.
     * <p>
     * Called after entity name/description changes.
     */
    public void evictSearchCaches(String graphId) {
        log.debug("Evicting search caches for graph_id={}", graphId);
        evictByGraphPrefix(RedisCacheConfig.CACHE_SEARCH, graphId);
    }

    /**
     * Invalidates all search caches (used when no graphId context exists).
     */
    public void evictSearchCaches() {
        log.debug("Evicting all search caches");
        evictCache(RedisCacheConfig.CACHE_SEARCH);
    }

    // -----------------------------------------------------------------------
    // Internals
    // -----------------------------------------------------------------------

    /**
     * Evicts cache entries by graphId prefix.
     * <p>
     * Every cache key starts with {@code graphId:} (see {@link #cacheKey}),
     * so entries can be matched by prefix pattern. Falls back to clearing
     * the whole cache region when Redis is unavailable or the scan fails.
     */
    private void evictByGraphPrefix(String cacheName, String graphId) {
        if (redisTemplate != null) {
            try {
                String pattern = KEY_PREFIX + cacheName + "::" + graphId + ":*";
                // NOTE(review): KEYS blocks Redis and scans the whole keyspace;
                // consider SCAN for large deployments — confirm expected scale.
                Set<String> keys = redisTemplate.keys(pattern);
                if (keys != null && !keys.isEmpty()) {
                    redisTemplate.delete(keys);
                    log.debug("Evicted {} keys for graph_id={} in cache={}", keys.size(), graphId, cacheName);
                }
                return;
            } catch (Exception e) {
                log.warn("Failed to evict by graph prefix, falling back to full cache clear: {}", e.getMessage());
            }
        }
        // Degraded path: clear the entire cache region.
        evictCache(cacheName);
    }

    /**
     * Precisely evicts a single cache entry.
     */
    private void evictKey(String cacheName, String key) {
        Cache cache = cacheManager.getCache(cacheName);
        if (cache != null) {
            cache.evict(key);
        }
    }

    /**
     * Clears an entire cache region.
     */
    private void evictCache(String cacheName) {
        Cache cache = cacheManager.getCache(cacheName);
        if (cache != null) {
            cache.clear();
        }
    }

    /**
     * Builds a cache key.
     * <p>
     * Joins the arguments into a colon-separated string for use in
     * {@code @Cacheable} key expressions.
     * <b>Convention</b>: graphId must be the first argument so entries can
     * be invalidated by graphId prefix.
     */
    public static String cacheKey(Object... parts) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < parts.length; i++) {
            if (i > 0) sb.append(':');
            sb.append(Objects.toString(parts[i], "null"));
        }
        return sb.toString();
    }
}
|
||||
@@ -0,0 +1,83 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.cache;
|
||||
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.cache.CacheManager;
|
||||
import org.springframework.cache.annotation.EnableCaching;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.data.redis.cache.RedisCacheConfiguration;
|
||||
import org.springframework.data.redis.cache.RedisCacheManager;
|
||||
import org.springframework.data.redis.connection.RedisConnectionFactory;
|
||||
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
|
||||
import org.springframework.data.redis.serializer.RedisSerializationContext;
|
||||
import org.springframework.data.redis.serializer.StringRedisSerializer;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Redis cache configuration.
 * <p>
 * Active when {@code datamate.knowledge-graph.cache.enabled=true} (which is
 * also the default via {@code matchIfMissing}); configures an independent
 * TTL for each cache region.
 */
@Slf4j
@Configuration
@EnableCaching
@ConditionalOnProperty(
        prefix = "datamate.knowledge-graph.cache",
        name = "enabled",
        havingValue = "true",
        matchIfMissing = true
)
public class RedisCacheConfig {

    /** Entity cache: single-entity lookups and entity lists. */
    public static final String CACHE_ENTITIES = "kg:entities";

    /** Query cache: neighbour graphs, subgraphs, path queries. */
    public static final String CACHE_QUERIES = "kg:queries";

    /** Search cache: full-text search results. */
    public static final String CACHE_SEARCH = "kg:search";

    // NOTE(review): @Primary makes this the application-wide default
    // CacheManager — confirm no other module relies on a different default.
    @Primary
    @Bean("knowledgeGraphCacheManager")
    public CacheManager knowledgeGraphCacheManager(
            RedisConnectionFactory connectionFactory,
            KnowledgeGraphProperties properties
    ) {
        KnowledgeGraphProperties.Cache cacheProps = properties.getCache();

        // JSON serialization keeps cached values human-readable and tolerant
        // of class-version changes.
        var jsonSerializer = new GenericJackson2JsonRedisSerializer();
        var serializationPair = RedisSerializationContext.SerializationPair.fromSerializer(jsonSerializer);

        RedisCacheConfiguration defaultConfig = RedisCacheConfiguration.defaultCacheConfig()
                .serializeKeysWith(RedisSerializationContext.SerializationPair.fromSerializer(new StringRedisSerializer()))
                .serializeValuesWith(serializationPair)
                .disableCachingNullValues()
                .prefixCacheNameWith("datamate:");

        // Independent TTL per cache region.
        Map<String, RedisCacheConfiguration> cacheConfigs = Map.of(
                CACHE_ENTITIES, defaultConfig.entryTtl(Duration.ofSeconds(cacheProps.getEntityTtlSeconds())),
                CACHE_QUERIES, defaultConfig.entryTtl(Duration.ofSeconds(cacheProps.getQueryTtlSeconds())),
                CACHE_SEARCH, defaultConfig.entryTtl(Duration.ofSeconds(cacheProps.getSearchTtlSeconds()))
        );

        log.info("Redis cache enabled: entity TTL={}s, query TTL={}s, search TTL={}s",
                cacheProps.getEntityTtlSeconds(),
                cacheProps.getQueryTtlSeconds(),
                cacheProps.getSearchTtlSeconds());

        return RedisCacheManager.builder(connectionFactory)
                // Unlisted cache names fall back to the query TTL.
                .cacheDefaults(defaultConfig.entryTtl(Duration.ofSeconds(cacheProps.getQueryTtlSeconds())))
                .withInitialCacheConfigurations(cacheConfigs)
                .transactionAware()
                .build();
    }
}
|
||||
@@ -0,0 +1,503 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.client;
|
||||
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import lombok.Data;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.core.ParameterizedTypeReference;
|
||||
import org.springframework.http.HttpMethod;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.web.client.RestClientException;
|
||||
import org.springframework.web.client.RestTemplate;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
import java.net.URLEncoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
/**
|
||||
* 数据管理服务 REST 客户端。
|
||||
* <p>
|
||||
* 通过 HTTP 调用 data-management-service 的 REST API,
|
||||
* 拉取数据集、文件等元数据用于同步到 Neo4j。
|
||||
*/
|
||||
@Component
|
||||
@Slf4j
|
||||
public class DataManagementClient {
|
||||
|
||||
private static final String UPDATED_FROM_PARAM = "updatedFrom";
|
||||
private static final String UPDATED_TO_PARAM = "updatedTo";
|
||||
private static final DateTimeFormatter DATETIME_QUERY_FORMATTER = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
|
||||
|
||||
private final RestTemplate restTemplate;
|
||||
private final String baseUrl;
|
||||
private final String annotationBaseUrl;
|
||||
private final int pageSize;
|
||||
|
||||
public DataManagementClient(
|
||||
@Qualifier("kgRestTemplate") RestTemplate restTemplate,
|
||||
KnowledgeGraphProperties properties) {
|
||||
this.restTemplate = restTemplate;
|
||||
this.baseUrl = properties.getSync().getDataManagementUrl();
|
||||
this.annotationBaseUrl = properties.getSync().getAnnotationServiceUrl();
|
||||
this.pageSize = properties.getSync().getPageSize();
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有数据集(自动分页)。
|
||||
*/
|
||||
public List<DatasetDTO> listAllDatasets() {
|
||||
return listAllDatasets(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有数据集(自动分页)并按更新时间窗口过滤。
|
||||
* <p>
|
||||
* 时间窗口参数会透传给上游服务;同时在本地再过滤一次,
|
||||
* 以兼容上游暂未支持该查询参数的场景。
|
||||
*/
|
||||
public List<DatasetDTO> listAllDatasets(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<DatasetDTO> datasets = fetchAllPaged(
|
||||
baseUrl + "/data-management/datasets",
|
||||
new ParameterizedTypeReference<PagedResult<DatasetDTO>>() {},
|
||||
"datasets",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(datasets, DatasetDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有工作流(自动分页)。
|
||||
*/
|
||||
public List<WorkflowDTO> listAllWorkflows() {
|
||||
return listAllWorkflows(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有工作流(自动分页)并按更新时间窗口过滤。
|
||||
*/
|
||||
public List<WorkflowDTO> listAllWorkflows(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<WorkflowDTO> workflows = fetchAllPaged(
|
||||
baseUrl + "/data-management/workflows",
|
||||
new ParameterizedTypeReference<PagedResult<WorkflowDTO>>() {},
|
||||
"workflows",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(workflows, WorkflowDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有作业(自动分页)。
|
||||
*/
|
||||
public List<JobDTO> listAllJobs() {
|
||||
return listAllJobs(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有作业(自动分页)并按更新时间窗口过滤。
|
||||
*/
|
||||
public List<JobDTO> listAllJobs(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<JobDTO> jobs = fetchAllPaged(
|
||||
baseUrl + "/data-management/jobs",
|
||||
new ParameterizedTypeReference<PagedResult<JobDTO>>() {},
|
||||
"jobs",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(jobs, JobDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有标注任务(自动分页,从标注服务)。
|
||||
*/
|
||||
public List<LabelTaskDTO> listAllLabelTasks() {
|
||||
return listAllLabelTasks(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有标注任务(自动分页,从标注服务)并按更新时间窗口过滤。
|
||||
*/
|
||||
public List<LabelTaskDTO> listAllLabelTasks(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<LabelTaskDTO> tasks = fetchAllPaged(
|
||||
annotationBaseUrl + "/annotation/label-tasks",
|
||||
new ParameterizedTypeReference<PagedResult<LabelTaskDTO>>() {},
|
||||
"label-tasks",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(tasks, LabelTaskDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有知识集(自动分页)。
|
||||
*/
|
||||
public List<KnowledgeSetDTO> listAllKnowledgeSets() {
|
||||
return listAllKnowledgeSets(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有知识集(自动分页)并按更新时间窗口过滤。
|
||||
*/
|
||||
public List<KnowledgeSetDTO> listAllKnowledgeSets(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<KnowledgeSetDTO> sets = fetchAllPaged(
|
||||
baseUrl + "/data-management/knowledge-sets",
|
||||
new ParameterizedTypeReference<PagedResult<KnowledgeSetDTO>>() {},
|
||||
"knowledge-sets",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(sets, KnowledgeSetDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有数据集(自动分页)。
|
||||
*/
|
||||
public List<DatasetDTO> listAllDatasetsLegacy() {
|
||||
return fetchAllPaged(
|
||||
baseUrl + "/data-management/datasets",
|
||||
new ParameterizedTypeReference<PagedResult<DatasetDTO>>() {},
|
||||
"datasets");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有工作流(自动分页)。
|
||||
*/
|
||||
public List<WorkflowDTO> listAllWorkflowsLegacy() {
|
||||
return fetchAllPaged(
|
||||
baseUrl + "/data-management/workflows",
|
||||
new ParameterizedTypeReference<PagedResult<WorkflowDTO>>() {},
|
||||
"workflows");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有作业(自动分页)。
|
||||
*/
|
||||
public List<JobDTO> listAllJobsLegacy() {
|
||||
return fetchAllPaged(
|
||||
baseUrl + "/data-management/jobs",
|
||||
new ParameterizedTypeReference<PagedResult<JobDTO>>() {},
|
||||
"jobs");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有标注任务(自动分页,从标注服务)。
|
||||
*/
|
||||
public List<LabelTaskDTO> listAllLabelTasksLegacy() {
|
||||
return fetchAllPaged(
|
||||
annotationBaseUrl + "/annotation/label-tasks",
|
||||
new ParameterizedTypeReference<PagedResult<LabelTaskDTO>>() {},
|
||||
"label-tasks");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有知识集(自动分页)。
|
||||
*/
|
||||
public List<KnowledgeSetDTO> listAllKnowledgeSetsLegacy() {
|
||||
return fetchAllPaged(
|
||||
baseUrl + "/data-management/knowledge-sets",
|
||||
new ParameterizedTypeReference<PagedResult<KnowledgeSetDTO>>() {},
|
||||
"knowledge-sets");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有用户的组织映射。
|
||||
*/
|
||||
public Map<String, String> fetchUserOrganizationMap() {
|
||||
String url = baseUrl + "/auth/users/organizations";
|
||||
log.debug("Fetching user-organization mappings from: {}", url);
|
||||
try {
|
||||
ResponseEntity<List<UserOrgDTO>> response = restTemplate.exchange(
|
||||
url, HttpMethod.GET, null,
|
||||
new ParameterizedTypeReference<List<UserOrgDTO>>() {});
|
||||
|
||||
List<UserOrgDTO> body = response.getBody();
|
||||
if (body == null || body.isEmpty()) {
|
||||
log.warn("No user-organization mappings returned from auth service");
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
Map<String, String> result = new LinkedHashMap<>();
|
||||
for (UserOrgDTO dto : body) {
|
||||
if (dto.getUsername() != null && !dto.getUsername().isBlank()) {
|
||||
result.put(dto.getUsername(), dto.getOrganization());
|
||||
}
|
||||
}
|
||||
log.info("Fetched {} user-organization mappings", result.size());
|
||||
return result;
|
||||
} catch (RestClientException e) {
|
||||
log.error("Failed to fetch user-organization mappings from: {}", url, e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 通用自动分页拉取方法。
|
||||
*/
|
||||
private <T> List<T> fetchAllPaged(String baseEndpoint,
|
||||
ParameterizedTypeReference<PagedResult<T>> typeRef,
|
||||
String resourceName) {
|
||||
return fetchAllPaged(baseEndpoint, typeRef, resourceName, Collections.emptyMap());
|
||||
}
|
||||
|
||||
/**
|
||||
* 通用自动分页拉取方法(支持附加查询参数)。
|
||||
*/
|
||||
private <T> List<T> fetchAllPaged(String baseEndpoint,
|
||||
ParameterizedTypeReference<PagedResult<T>> typeRef,
|
||||
String resourceName,
|
||||
Map<String, String> extraQueryParams) {
|
||||
List<T> allItems = new ArrayList<>();
|
||||
int page = 0;
|
||||
|
||||
while (true) {
|
||||
String url = buildPagedUrl(baseEndpoint, page, extraQueryParams);
|
||||
log.debug("Fetching {}: page={}, size={}", resourceName, page, pageSize);
|
||||
|
||||
try {
|
||||
ResponseEntity<PagedResult<T>> response = restTemplate.exchange(
|
||||
url, HttpMethod.GET, null, typeRef);
|
||||
|
||||
PagedResult<T> body = response.getBody();
|
||||
if (body == null || body.getContent() == null || body.getContent().isEmpty()) {
|
||||
break;
|
||||
}
|
||||
|
||||
allItems.addAll(body.getContent());
|
||||
log.debug("Fetched {} {} (page {}), total so far: {}",
|
||||
body.getContent().size(), resourceName, page, allItems.size());
|
||||
|
||||
if (page >= body.getTotalPages() - 1) {
|
||||
break;
|
||||
}
|
||||
page++;
|
||||
} catch (RestClientException e) {
|
||||
log.error("Failed to fetch {} : page={}, url={}", resourceName, page, url, e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
log.info("Fetched {} {} in total", allItems.size(), resourceName);
|
||||
return allItems;
|
||||
}
|
||||
|
||||
private String buildPagedUrl(String baseEndpoint, int page, Map<String, String> extraQueryParams) {
|
||||
StringBuilder builder = new StringBuilder(baseEndpoint)
|
||||
.append("?page=").append(page)
|
||||
.append("&size=").append(pageSize);
|
||||
|
||||
if (extraQueryParams != null && !extraQueryParams.isEmpty()) {
|
||||
extraQueryParams.forEach((key, value) -> {
|
||||
if (key == null || key.isBlank() || value == null || value.isBlank()) {
|
||||
return;
|
||||
}
|
||||
builder.append("&")
|
||||
.append(URLEncoder.encode(key, StandardCharsets.UTF_8))
|
||||
.append("=")
|
||||
.append(URLEncoder.encode(value, StandardCharsets.UTF_8));
|
||||
});
|
||||
}
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
private static Map<String, String> buildTimeWindowQuery(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
if (updatedFrom != null && updatedTo != null && updatedFrom.isAfter(updatedTo)) {
|
||||
throw new IllegalArgumentException("updatedFrom must be less than or equal to updatedTo");
|
||||
}
|
||||
|
||||
Map<String, String> query = new LinkedHashMap<>();
|
||||
if (updatedFrom != null) {
|
||||
query.put(UPDATED_FROM_PARAM, DATETIME_QUERY_FORMATTER.format(updatedFrom));
|
||||
}
|
||||
if (updatedTo != null) {
|
||||
query.put(UPDATED_TO_PARAM, DATETIME_QUERY_FORMATTER.format(updatedTo));
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
||||
private static <T> List<T> filterByUpdatedAt(
|
||||
List<T> items,
|
||||
Function<T, LocalDateTime> updatedAtGetter,
|
||||
LocalDateTime updatedFrom,
|
||||
LocalDateTime updatedTo) {
|
||||
if ((updatedFrom == null && updatedTo == null) || items == null || items.isEmpty()) {
|
||||
return items;
|
||||
}
|
||||
|
||||
return items.stream()
|
||||
.filter(item -> {
|
||||
if (item == null) {
|
||||
return false;
|
||||
}
|
||||
LocalDateTime updatedAt = updatedAtGetter.apply(item);
|
||||
if (updatedAt == null) {
|
||||
return false;
|
||||
}
|
||||
if (updatedFrom != null && updatedAt.isBefore(updatedFrom)) {
|
||||
return false;
|
||||
}
|
||||
return updatedTo == null || !updatedAt.isAfter(updatedTo);
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
// Response DTOs (only the fields needed for synchronization)
// -----------------------------------------------------------------------
|
||||
|
||||
/** Generic page envelope returned by the upstream paged list endpoints. */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class PagedResult<T> {
    // Items of the current page; callers must tolerate null/empty.
    private List<T> content;
    // Zero-based index of this page.
    private long page;
    // Total element count across all pages.
    private long totalElements;
    // Total page count; pagination stops at totalPages - 1.
    private long totalPages;
}
|
||||
|
||||
/**
 * Aligned with DatasetResponse from data-management-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class DatasetDTO {
    private String id;
    private String name;
    private String description;
    private String parentDatasetId;
    private String datasetType;
    private String status;
    private Long totalSize;
    private Integer fileCount;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    private List<TagDTO> tags;
}
|
||||
|
||||
/**
 * Aligned with TagResponse from data-management-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class TagDTO {
    private String id;
    private String name;
    private String color;
    private String description;
}
|
||||
|
||||
/**
 * Aligned with the Workflow model of data-management-service / data-cleaning-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class WorkflowDTO {
    private String id;
    private String name;
    private String description;
    private String workflowType;
    private String status;
    private String version;
    private Integer operatorCount;
    private String schedule;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    /** IDs of the input datasets consumed by this workflow. */
    private List<String> inputDatasetIds;
}
|
||||
|
||||
/**
 * Aligned with Job / CleaningTask / DataSynthInstance from data-management-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class JobDTO {
    private String id;
    private String name;
    private String description;
    private String jobType;
    private String status;
    private String startedAt;
    private String completedAt;
    private Long durationSeconds;
    private Long inputCount;
    private Long outputCount;
    private String errorMessage;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    /** Input dataset ID. */
    private String inputDatasetId;
    /** Output dataset ID. */
    private String outputDatasetId;
    /** Owning workflow ID (TRIGGERS relationship). */
    private String workflowId;
    /** ID of the job this one depends on (DEPENDS_ON relationship). */
    private String dependsOnJobId;
}
|
||||
|
||||
/**
 * Aligned with LabelingProject / AutoAnnotationTask from data-annotation-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class LabelTaskDTO {
    private String id;
    private String name;
    private String description;
    private String taskMode;
    private String dataType;
    private String labelingType;
    private String status;
    private Double progress;
    private String templateName;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    /** ID of the dataset being labeled (USES_DATASET relationship). */
    private String datasetId;
}
|
||||
|
||||
/**
 * Aligned with the KnowledgeSet model of data-management-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class KnowledgeSetDTO {
    private String id;
    private String name;
    private String description;
    private String status;
    private String domain;
    private String businessLine;
    private String sensitivity;
    private Integer itemCount;
    private String validFrom;
    private String validTo;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    /** IDs of the source datasets (SOURCED_FROM relationship). */
    private List<String> sourceDatasetIds;
}
|
||||
|
||||
/**
 * User-to-organization mapping DTO (aligned with AuthController.listUserOrganizations).
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class UserOrgDTO {
    private String username;
    private String organization;
}
|
||||
}
|
||||
@@ -0,0 +1,36 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.exception;

import com.datamate.common.infrastructure.exception.ErrorCode;
import lombok.AllArgsConstructor;
import lombok.Getter;

/**
 * Error codes for the knowledge-graph module.
 * <p>
 * Codes follow the {@code knowledge_graph.NNNN} convention; messages are the
 * user-facing Chinese texts returned by the API and must not be altered casually.
 */
@Getter
@AllArgsConstructor
public enum KnowledgeGraphErrorCode implements ErrorCode {

    ENTITY_NOT_FOUND("knowledge_graph.0001", "实体不存在"),
    RELATION_NOT_FOUND("knowledge_graph.0002", "关系不存在"),
    GRAPH_NOT_FOUND("knowledge_graph.0003", "图谱不存在"),
    DUPLICATE_ENTITY("knowledge_graph.0004", "实体已存在"),
    INVALID_RELATION("knowledge_graph.0005", "无效的关系定义"),
    IMPORT_FAILED("knowledge_graph.0006", "图谱导入失败"),
    QUERY_DEPTH_EXCEEDED("knowledge_graph.0007", "查询深度超出限制"),
    MAX_NODES_EXCEEDED("knowledge_graph.0008", "查询结果节点数超出限制"),
    SYNC_FAILED("knowledge_graph.0009", "数据同步失败"),
    EMPTY_SNAPSHOT_PURGE_BLOCKED("knowledge_graph.0010", "空快照保护:上游返回空列表,已阻止 purge 操作"),
    SCHEMA_INIT_FAILED("knowledge_graph.0011", "图谱 Schema 初始化失败"),
    INSECURE_DEFAULT_CREDENTIALS("knowledge_graph.0012", "检测到默认凭据,生产环境禁止使用默认密码"),
    UNAUTHORIZED_INTERNAL_CALL("knowledge_graph.0013", "内部调用未授权:X-Internal-Token 校验失败"),
    QUERY_TIMEOUT("knowledge_graph.0014", "图查询超时,请缩小搜索范围或减少深度"),
    SCHEMA_MIGRATION_FAILED("knowledge_graph.0015", "Schema 迁移执行失败"),
    SCHEMA_CHECKSUM_MISMATCH("knowledge_graph.0016", "Schema 迁移 checksum 不匹配:已应用的迁移被修改"),
    SCHEMA_MIGRATION_LOCKED("knowledge_graph.0017", "Schema 迁移锁被占用,其他实例正在执行迁移"),
    REVIEW_NOT_FOUND("knowledge_graph.0018", "审核记录不存在"),
    REVIEW_ALREADY_PROCESSED("knowledge_graph.0019", "审核记录已处理");

    // Machine-readable error code, e.g. "knowledge_graph.0001".
    private final String code;
    // Human-readable (Chinese) message returned to API clients.
    private final String message;
}
|
||||
@@ -0,0 +1,63 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j;

import com.datamate.common.infrastructure.exception.BusinessException;
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
import com.datamate.knowledgegraph.infrastructure.neo4j.migration.SchemaMigrationService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import java.util.Locale;
import java.util.Set;
import java.util.UUID;

/**
 * Graph schema initializer.
 * <p>
 * On application startup, runs versioned schema migrations via
 * {@link SchemaMigrationService}.
 * <p>
 * <b>Security self-check</b>: refuses to start when a known default/weak Neo4j
 * password is detected, unless the active profile is a non-production one
 * (in which case only a warning is logged).
 */
@Component
@Slf4j
@RequiredArgsConstructor
@Order(1)
public class GraphInitializer implements ApplicationRunner {

    /** Known weak default passwords that are rejected at startup. */
    private static final Set<String> BLOCKED_DEFAULT_PASSWORDS = Set.of(
            "datamate123", "neo4j", "password", "123456", "admin"
    );

    /**
     * Profiles treated as non-production, where weak credentials are tolerated
     * with a warning instead of failing startup.
     * NOTE(review): "default" is treated as local development — confirm this
     * matches the deployment conventions of this project.
     */
    private static final Set<String> NON_PRODUCTION_PROFILES = Set.of(
            "default", "dev", "test", "local"
    );

    private final KnowledgeGraphProperties properties;
    private final SchemaMigrationService schemaMigrationService;

    @Value("${spring.neo4j.authentication.password:}")
    private String neo4jPassword;

    @Value("${spring.profiles.active:default}")
    private String activeProfile;

    @Override
    public void run(ApplicationArguments args) {
        // ── Security self-check: default credential detection ──
        // Re-enabled: the check was previously commented out, leaving
        // BLOCKED_DEFAULT_PASSWORDS and INSECURE_DEFAULT_CREDENTIALS dead.
        validateCredentials();

        if (!properties.getSync().isAutoInitSchema()) {
            log.info("Schema auto-init is disabled, skipping");
            return;
        }

        // Random instance ID is used by the migration service's distributed lock.
        schemaMigrationService.migrate(UUID.randomUUID().toString());
    }

    /**
     * Detects default/weak Neo4j credentials.
     * <p>
     * In non-production profiles a warning is logged; in any other profile
     * startup is aborted with {@code INSECURE_DEFAULT_CREDENTIALS}.
     *
     * @throws BusinessException when a weak password is detected outside
     *                           non-production profiles
     */
    private void validateCredentials() {
        boolean weakPassword = neo4jPassword == null
                || neo4jPassword.isBlank()
                || BLOCKED_DEFAULT_PASSWORDS.contains(neo4jPassword);
        if (!weakPassword) {
            return;
        }

        if (isNonProductionProfile()) {
            log.warn("Default/weak Neo4j credentials detected; tolerated because active profile '{}' is non-production",
                    activeProfile);
            return;
        }

        throw BusinessException.of(
                KnowledgeGraphErrorCode.INSECURE_DEFAULT_CREDENTIALS,
                "Default Neo4j credentials detected; refusing to start with active profile: " + activeProfile
        );
    }

    /** Returns true when any active profile (comma-separated list) is non-production. */
    private boolean isNonProductionProfile() {
        for (String profile : activeProfile.split(",")) {
            if (NON_PRODUCTION_PROFILES.contains(profile.trim().toLowerCase(Locale.ROOT))) {
                return true;
            }
        }
        return false;
    }
}
|
||||
@@ -0,0 +1,117 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j;

import jakarta.validation.constraints.Min;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import org.springframework.validation.annotation.Validated;

/**
 * Configuration properties for the knowledge-graph module,
 * bound from the {@code datamate.knowledge-graph.*} prefix.
 */
@Data
@Component
@Validated
@ConfigurationProperties(prefix = "datamate.knowledge-graph")
public class KnowledgeGraphProperties {

    /** Default hop-count limit for graph queries. */
    private int maxDepth = 3;

    /** Maximum number of nodes returned per subgraph query. */
    private int maxNodesPerQuery = 500;

    /** Timeout (seconds) for complex graph queries, guarding against runaway path enumeration. */
    @Min(value = 1, message = "queryTimeoutSeconds 必须 >= 1")
    private int queryTimeoutSeconds = 10;

    /** Bulk-import batch size (must be >= 1, otherwise modulo arithmetic would throw). */
    @Min(value = 1, message = "importBatchSize 必须 >= 1")
    private int importBatchSize = 100;

    /** Synchronization-related settings. */
    private Sync sync = new Sync();

    /** Security-related settings. */
    private Security security = new Security();

    /** Schema migration settings. */
    private Migration migration = new Migration();

    /** Cache settings. */
    private Cache cache = new Cache();

    @Data
    public static class Security {

        /** Internal service-call token, validated against the X-Internal-Token header on sync endpoints. */
        private String internalToken;

        /**
         * Whether to skip internal token validation (default false, i.e. fail-closed).
         * <p>
         * May only be explicitly set to true in dev/test environments.
         * Production must keep this false and configure {@code internal-token}.
         */
        private boolean skipTokenCheck = false;
    }

    @Data
    public static class Sync {

        /** Base URL of the data-management service. */
        private String dataManagementUrl = "http://localhost:8080/api";

        /** Base URL of the annotation service. */
        private String annotationServiceUrl = "http://localhost:8080/api";

        /** Page size used when pulling paged resources during sync. */
        private int pageSize = 200;

        /** HTTP connect timeout (milliseconds). */
        private int connectTimeout = 5000;

        /** HTTP read timeout (milliseconds). */
        private int readTimeout = 30000;

        /** Maximum retry attempts on failure. */
        private int maxRetries = 3;

        /** Delay between retries (milliseconds). */
        private long retryInterval = 1000;

        /** Whether to auto-initialize the schema at startup. */
        private boolean autoInitSchema = true;

        /**
         * Whether an empty snapshot may trigger a purge (default false).
         * <p>
         * When the upstream returns an empty list and this flag is false, the purge
         * is skipped to avoid accidentally deleting all synchronized entities.
         * Enable only when the data source is confirmed to be genuinely empty.
         */
        private boolean allowPurgeOnEmptySnapshot = false;
    }

    @Data
    public static class Migration {

        /** Whether versioned schema migrations are enabled. */
        private boolean enabled = true;

        /** Whether to validate checksums of applied migrations (detects tampering). */
        private boolean validateChecksums = true;
    }

    @Data
    public static class Cache {

        /** Whether caching is enabled. */
        private boolean enabled = true;

        /** Entity cache TTL (seconds). */
        private long entityTtlSeconds = 3600;

        /** Query-result cache TTL (seconds). */
        private long queryTtlSeconds = 300;

        /** Full-text search result cache TTL (seconds). */
        private long searchTtlSeconds = 180;
    }
}
|
||||
@@ -0,0 +1,20 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;

import java.util.List;

/**
 * Schema migration contract.
 * <p>
 * Each implementation represents one versioned schema change; version numbers
 * increase monotonically.
 */
public interface SchemaMigration {

    /** Monotonically increasing version number (1, 2, 3...). */
    int getVersion();

    /** Human-readable description of the change. */
    String getDescription();

    /** Cypher DDL statements to execute, in order. */
    List<String> getStatements();
}
|
||||
@@ -0,0 +1,42 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Migration record, mapping a {@code _SchemaMigration} node.
 * <p>
 * Plain POJO; intentionally not annotated with SDN {@code @Node}.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SchemaMigrationRecord {

    /** Migration version number. */
    private int version;

    /** Migration description. */
    private String description;

    /** SHA-256 checksum of the migration statements. */
    private String checksum;

    /** When the migration was applied (ISO-8601). */
    private String appliedAt;

    /** Execution time of the migration (milliseconds). */
    private long executionTimeMs;

    /** Whether the migration succeeded. */
    private boolean success;

    /** Number of statements in the migration. */
    private int statementsCount;

    /** Error message when the migration failed. */
    private String errorMessage;
}
|
||||
@@ -0,0 +1,384 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;

import com.datamate.common.infrastructure.exception.BusinessException;
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.neo4j.core.Neo4jClient;
import org.springframework.stereotype.Component;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.time.Instant;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Schema migration orchestrator.
 * <p>
 * Inspired by Flyway, provides versioned migrations for the Neo4j graph database:
 * <ul>
 *     <li>records applied migration versions in the database ({@code _SchemaMigration} nodes)</li>
 *     <li>automatically detects and executes newly added migrations</li>
 *     <li>validates checksums to detect tampering with already-applied migrations</li>
 *     <li>uses a distributed lock ({@code _SchemaLock} node) to prevent concurrent
 *         migration by multiple instances</li>
 * </ul>
 */
@Component
@Slf4j
public class SchemaMigrationService {

    /** Distributed-lock expiry (milliseconds): 5 minutes. */
    private static final long LOCK_TIMEOUT_MS = 5 * 60 * 1000;

    /** Only "already exists" style error messages are swallowed; everything else propagates. */
    private static final Set<String> ALREADY_EXISTS_KEYWORDS = Set.of(
            "already exists", "already exist", "EquivalentSchemaRuleAlreadyExists"
    );

    private final Neo4jClient neo4jClient;
    private final KnowledgeGraphProperties properties;
    // Registered migrations, sorted ascending by version in the constructor.
    private final List<SchemaMigration> migrations;

    public SchemaMigrationService(Neo4jClient neo4jClient,
                                  KnowledgeGraphProperties properties,
                                  List<SchemaMigration> migrations) {
        this.neo4jClient = neo4jClient;
        this.properties = properties;
        this.migrations = migrations.stream()
                .sorted(Comparator.comparingInt(SchemaMigration::getVersion))
                .toList();
    }

    /**
     * Main migration flow.
     *
     * @param instanceId identifier of the current instance, used for the distributed lock
     */
    public void migrate(String instanceId) {
        if (!properties.getMigration().isEnabled()) {
            log.info("Schema migration is disabled, skipping");
            return;
        }

        log.info("Starting schema migration, instanceId={}", instanceId);

        // 1. Bootstrap — create the constraints the migration system itself needs
        bootstrapMigrationSchema();

        // 2. Acquire the distributed lock
        acquireLock(instanceId);

        try {
            // 3. Load applied migrations
            List<SchemaMigrationRecord> applied = loadAppliedMigrations();

            // 4. Validate checksums
            if (properties.getMigration().isValidateChecksums()) {
                validateChecksums(applied, migrations);
            }

            // 5. Filter pending migrations
            Set<Integer> appliedVersions = applied.stream()
                    .map(SchemaMigrationRecord::getVersion)
                    .collect(Collectors.toSet());

            List<SchemaMigration> pending = migrations.stream()
                    .filter(m -> !appliedVersions.contains(m.getVersion()))
                    .toList();

            if (pending.isEmpty()) {
                log.info("Schema is up to date, no pending migrations");
                return;
            }

            // 6. Execute one by one
            executePendingMigrations(pending);

            log.info("Schema migration completed successfully, applied {} migration(s)", pending.size());

        } finally {
            // 7. Release the lock
            releaseLock(instanceId);
        }
    }

    /**
     * Creates the constraints the migration system itself needs
     * (solves the chicken-and-egg problem).
     */
    void bootstrapMigrationSchema() {
        log.debug("Bootstrapping migration schema constraints");
        neo4jClient.query(
                "CREATE CONSTRAINT schema_migration_version_unique IF NOT EXISTS " +
                "FOR (n:_SchemaMigration) REQUIRE n.version IS UNIQUE"
        ).run();
        neo4jClient.query(
                "CREATE CONSTRAINT schema_lock_name_unique IF NOT EXISTS " +
                "FOR (n:_SchemaLock) REQUIRE n.name IS UNIQUE"
        ).run();

        // Repair legacy nodes: backfill missing properties with defaults so later
        // queries do not emit missing-property warnings.
        neo4jClient.query(
                "MATCH (m:_SchemaMigration) WHERE m.description IS NULL OR m.checksum IS NULL " +
                "SET m.description = COALESCE(m.description, ''), " +
                "    m.checksum = COALESCE(m.checksum, ''), " +
                "    m.applied_at = COALESCE(m.applied_at, ''), " +
                "    m.execution_time_ms = COALESCE(m.execution_time_ms, 0), " +
                "    m.statements_count = COALESCE(m.statements_count, 0), " +
                "    m.error_message = COALESCE(m.error_message, '')"
        ).run();
    }

    /**
     * Acquires the distributed lock.
     * <p>
     * MERGEs the {@code _SchemaLock} node; if the lock is held by another instance
     * and has not expired, throws. If the lock expired (older than 5 minutes),
     * it is taken over automatically.
     * <p>
     * Timestamps use the database-side {@code datetime().epochMillis} exclusively,
     * avoiding clock skew between instances causing spurious takeovers.
     */
    void acquireLock(String instanceId) {
        log.debug("Acquiring schema migration lock, instanceId={}", instanceId);

        // Use database time (datetime().epochMillis) to avoid multi-instance clock skew.
        Optional<Map<String, Object>> result = neo4jClient.query(
                "MERGE (lock:_SchemaLock {name: 'schema_migration'}) " +
                "ON CREATE SET lock.locked_by = $instanceId, lock.locked_at = datetime().epochMillis " +
                "WITH lock, " +
                "     CASE WHEN lock.locked_by = $instanceId THEN true " +
                "          WHEN lock.locked_at < (datetime().epochMillis - $timeoutMs) THEN true " +
                "          ELSE false END AS canAcquire " +
                "SET lock.locked_by = CASE WHEN canAcquire THEN $instanceId ELSE lock.locked_by END, " +
                "    lock.locked_at = CASE WHEN canAcquire THEN datetime().epochMillis ELSE lock.locked_at END " +
                "RETURN lock.locked_by AS lockedBy, canAcquire"
        ).bindAll(Map.of("instanceId", instanceId, "timeoutMs", LOCK_TIMEOUT_MS))
         .fetch().first();

        if (result.isEmpty()) {
            throw new IllegalStateException("Failed to acquire schema migration lock: unexpected empty result");
        }

        Boolean canAcquire = (Boolean) result.get().get("canAcquire");
        if (!Boolean.TRUE.equals(canAcquire)) {
            String lockedBy = (String) result.get().get("lockedBy");
            throw BusinessException.of(
                    KnowledgeGraphErrorCode.SCHEMA_MIGRATION_LOCKED,
                    "Schema migration lock is held by instance: " + lockedBy
            );
        }

        log.info("Schema migration lock acquired, instanceId={}", instanceId);
    }

    /**
     * Releases the distributed lock. Failures are logged but never propagated,
     * so they cannot mask an exception from the migration itself.
     */
    void releaseLock(String instanceId) {
        try {
            neo4jClient.query(
                    "MATCH (lock:_SchemaLock {name: 'schema_migration', locked_by: $instanceId}) " +
                    "DELETE lock"
            ).bindAll(Map.of("instanceId", instanceId)).run();
            log.debug("Schema migration lock released, instanceId={}", instanceId);
        } catch (Exception e) {
            log.warn("Failed to release schema migration lock: {}", e.getMessage());
        }
    }

    /**
     * Loads the records of successfully applied migrations, ordered by version.
     */
    List<SchemaMigrationRecord> loadAppliedMigrations() {
        return neo4jClient.query(
                "MATCH (m:_SchemaMigration {success: true}) " +
                "RETURN m.version AS version, " +
                "       COALESCE(m.description, '') AS description, " +
                "       COALESCE(m.checksum, '') AS checksum, " +
                "       COALESCE(m.applied_at, '') AS appliedAt, " +
                "       COALESCE(m.execution_time_ms, 0) AS executionTimeMs, " +
                "       m.success AS success, " +
                "       COALESCE(m.statements_count, 0) AS statementsCount, " +
                "       COALESCE(m.error_message, '') AS errorMessage " +
                "ORDER BY m.version"
        ).fetch().all().stream()
                .map(row -> SchemaMigrationRecord.builder()
                        .version(((Number) row.get("version")).intValue())
                        .description((String) row.get("description"))
                        .checksum((String) row.get("checksum"))
                        .appliedAt((String) row.get("appliedAt"))
                        .executionTimeMs(((Number) row.get("executionTimeMs")).longValue())
                        .success(Boolean.TRUE.equals(row.get("success")))
                        .statementsCount(((Number) row.get("statementsCount")).intValue())
                        .errorMessage((String) row.get("errorMessage"))
                        .build())
                .toList();
    }

    /**
     * Validates checksums of applied migrations against the registered code.
     *
     * @throws BusinessException with SCHEMA_CHECKSUM_MISMATCH when an applied
     *                           migration's statements were modified after the fact
     */
    void validateChecksums(List<SchemaMigrationRecord> applied, List<SchemaMigration> registered) {
        Map<Integer, SchemaMigration> registeredByVersion = registered.stream()
                .collect(Collectors.toMap(SchemaMigration::getVersion, m -> m));

        for (SchemaMigrationRecord record : applied) {
            SchemaMigration migration = registeredByVersion.get(record.getVersion());
            if (migration == null) {
                continue; // applied but no longer present in code (old migration removed)
            }

            // Skip legacy records without a checksum (nodes repaired by the property backfill)
            if (record.getChecksum() == null || record.getChecksum().isEmpty()) {
                log.warn("Migration V{} ({}) has no recorded checksum, skipping validation",
                        record.getVersion(), record.getDescription());
                continue;
            }

            String currentChecksum = computeChecksum(migration.getStatements());
            if (!currentChecksum.equals(record.getChecksum())) {
                throw BusinessException.of(
                        KnowledgeGraphErrorCode.SCHEMA_CHECKSUM_MISMATCH,
                        String.format("Migration V%d (%s): recorded checksum=%s, current checksum=%s",
                                record.getVersion(), record.getDescription(),
                                record.getChecksum(), currentChecksum)
                );
            }
        }
    }

    /**
     * Executes each pending migration in version order, recording both success
     * and failure outcomes as {@code _SchemaMigration} nodes. A failure aborts
     * the run with SCHEMA_MIGRATION_FAILED.
     */
    void executePendingMigrations(List<SchemaMigration> pending) {
        for (SchemaMigration migration : pending) {
            log.info("Executing migration V{}: {}", migration.getVersion(), migration.getDescription());

            long startTime = System.currentTimeMillis();
            String errorMessage = null;
            boolean success = true;

            try {
                for (String statement : migration.getStatements()) {
                    try {
                        neo4jClient.query(statement).run();
                        log.debug("  Statement executed: {}",
                                statement.length() <= 100 ? statement : statement.substring(0, 97) + "...");
                    } catch (Exception e) {
                        // "Already exists" errors are safe to skip (idempotent DDL);
                        // anything else propagates and fails the migration.
                        if (isAlreadyExistsError(e)) {
                            log.debug("  Schema element already exists (safe to skip): {}",
                                    statement.length() <= 100 ? statement : statement.substring(0, 97) + "...");
                        } else {
                            throw e;
                        }
                    }
                }
            } catch (Exception e) {
                success = false;
                errorMessage = e.getMessage();

                long elapsed = System.currentTimeMillis() - startTime;
                // Record the failure before rethrowing so the outcome is persisted.
                recordMigration(SchemaMigrationRecord.builder()
                        .version(migration.getVersion())
                        .description(migration.getDescription())
                        .checksum(computeChecksum(migration.getStatements()))
                        .appliedAt(Instant.now().toString())
                        .executionTimeMs(elapsed)
                        .success(false)
                        .statementsCount(migration.getStatements().size())
                        .errorMessage(errorMessage)
                        .build());

                throw BusinessException.of(
                        KnowledgeGraphErrorCode.SCHEMA_MIGRATION_FAILED,
                        String.format("Migration V%d (%s) failed: %s",
                                migration.getVersion(), migration.getDescription(), errorMessage)
                );
            }

            long elapsed = System.currentTimeMillis() - startTime;
            recordMigration(SchemaMigrationRecord.builder()
                    .version(migration.getVersion())
                    .description(migration.getDescription())
                    .checksum(computeChecksum(migration.getStatements()))
                    .appliedAt(Instant.now().toString())
                    .executionTimeMs(elapsed)
                    .success(true)
                    .statementsCount(migration.getStatements().size())
                    .build());

            log.info("Migration V{} completed in {}ms", migration.getVersion(), elapsed);
        }
    }

    /**
     * Writes the migration record node.
     * <p>
     * Uses MERGE (matched by version) + SET rather than CREATE, ensuring:
     * <ul>
     *     <li>retries after a failure do not dead-end on the unique constraint (P0)</li>
     *     <li>if the migration succeeded but the record write failed, a re-run can
     *         safely backfill the record (idempotency)</li>
     * </ul>
     */
    void recordMigration(SchemaMigrationRecord record) {
        Map<String, Object> params = new HashMap<>();
        params.put("version", record.getVersion());
        params.put("description", nullToEmpty(record.getDescription()));
        params.put("checksum", nullToEmpty(record.getChecksum()));
        params.put("appliedAt", nullToEmpty(record.getAppliedAt()));
        params.put("executionTimeMs", record.getExecutionTimeMs());
        params.put("success", record.isSuccess());
        params.put("statementsCount", record.getStatementsCount());
        params.put("errorMessage", nullToEmpty(record.getErrorMessage()));

        neo4jClient.query(
                "MERGE (m:_SchemaMigration {version: $version}) " +
                "SET m.description = $description, " +
                "    m.checksum = $checksum, " +
                "    m.applied_at = $appliedAt, " +
                "    m.execution_time_ms = $executionTimeMs, " +
                "    m.success = $success, " +
                "    m.statements_count = $statementsCount, " +
                "    m.error_message = $errorMessage"
        ).bindAll(params).run();
    }

    /**
     * Computes the SHA-256 checksum over the statement list.
     * <p>
     * NOTE(review): statements are concatenated with no delimiter, so e.g.
     * ["ab","c"] and ["a","bc"] hash identically. Changing this now would
     * invalidate all recorded checksums, so it is left as-is — confirm whether
     * a delimiter should be introduced alongside a checksum re-baseline.
     */
    static String computeChecksum(List<String> statements) {
        try {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            for (String statement : statements) {
                digest.update(statement.getBytes(StandardCharsets.UTF_8));
            }
            byte[] hash = digest.digest();
            StringBuilder hex = new StringBuilder();
            for (byte b : hash) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalStateException("SHA-256 algorithm not available", e);
        }
    }

    /**
     * Returns true when the exception is merely "schema element already exists"
     * (safe to ignore); matching is case-insensitive on the message text.
     */
    static boolean isAlreadyExistsError(Exception e) {
        String msg = e.getMessage();
        if (msg == null) {
            return false;
        }
        String lowerMsg = msg.toLowerCase();
        return ALREADY_EXISTS_KEYWORDS.stream().anyMatch(kw -> lowerMsg.contains(kw.toLowerCase()));
    }

    /**
     * Converts a null string to the empty string, so bindAll never passes a null
     * value to the Neo4j driver (which would leave the property unset).
     */
    private static String nullToEmpty(String value) {
        return value != null ? value : "";
    }
}
|
||||
@@ -0,0 +1,66 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;

import org.springframework.stereotype.Component;

import java.util.List;

/**
 * V1 baseline migration: the initial schema.
 * <p>
 * Contains all 14 original DDL statements from {@code GraphInitializer}.
 * On a pre-existing database the first run is a no-op for every statement
 * (thanks to {@code IF NOT EXISTS}) but establishes the version baseline.
 */
@Component
public class V1__InitialSchema implements SchemaMigration {

    @Override
    public int getVersion() {
        return 1;
    }

    @Override
    public String getDescription() {
        return "Initial schema: Entity and SyncHistory constraints and indexes";
    }

    @Override
    public List<String> getStatements() {
        return List.of(
                // Constraints (each implicitly creates its backing index)
                "CREATE CONSTRAINT entity_id_unique IF NOT EXISTS FOR (n:Entity) REQUIRE n.id IS UNIQUE",

                // Composite unique constraint for sync upserts: prevents duplicate
                // entities under concurrent writes
                "CREATE CONSTRAINT entity_sync_unique IF NOT EXISTS " +
                "FOR (n:Entity) REQUIRE (n.graph_id, n.source_id, n.type) IS UNIQUE",

                // Single-property indexes
                "CREATE INDEX entity_graph_id IF NOT EXISTS FOR (n:Entity) ON (n.graph_id)",
                "CREATE INDEX entity_type IF NOT EXISTS FOR (n:Entity) ON (n.type)",
                "CREATE INDEX entity_name IF NOT EXISTS FOR (n:Entity) ON (n.name)",
                "CREATE INDEX entity_source_id IF NOT EXISTS FOR (n:Entity) ON (n.source_id)",
                "CREATE INDEX entity_created_at IF NOT EXISTS FOR (n:Entity) ON (n.created_at)",

                // Composite indexes
                "CREATE INDEX entity_graph_id_type IF NOT EXISTS FOR (n:Entity) ON (n.graph_id, n.type)",
                "CREATE INDEX entity_graph_id_id IF NOT EXISTS FOR (n:Entity) ON (n.graph_id, n.id)",
                "CREATE INDEX entity_graph_id_source_id IF NOT EXISTS FOR (n:Entity) ON (n.graph_id, n.source_id)",

                // Full-text index
                "CREATE FULLTEXT INDEX entity_fulltext IF NOT EXISTS FOR (n:Entity) ON EACH [n.name, n.description]",

                // ── SyncHistory constraints and indexes ──

                // Unique (graph_id, sync_id) constraint guards against ID collisions
                "CREATE CONSTRAINT sync_history_graph_sync_unique IF NOT EXISTS " +
                "FOR (h:SyncHistory) REQUIRE (h.graph_id, h.sync_id) IS UNIQUE",

                // Query-optimization indexes
                "CREATE INDEX sync_history_graph_started IF NOT EXISTS " +
                "FOR (h:SyncHistory) ON (h.graph_id, h.started_at)",

                "CREATE INDEX sync_history_graph_status_started IF NOT EXISTS " +
                "FOR (h:SyncHistory) ON (h.graph_id, h.status, h.started_at)"
        );
    }
}
|
||||
@@ -0,0 +1,51 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;
|
||||
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* V2 性能优化迁移:关系索引和属性索引。
|
||||
* <p>
|
||||
* V1 仅对 Entity 节点创建了索引。该迁移补充:
|
||||
* <ul>
|
||||
* <li>RELATED_TO 关系的 graph_id 索引(加速子图查询中的关系过滤)</li>
|
||||
* <li>RELATED_TO 关系的 relation_type 索引(加速按类型筛选)</li>
|
||||
* <li>Entity 的 (graph_id, name) 复合索引(加速 name 过滤查询)</li>
|
||||
* <li>Entity 的 updated_at 索引(加速增量同步范围查询)</li>
|
||||
* <li>RELATED_TO 关系的 (graph_id, relation_type) 复合索引</li>
|
||||
* </ul>
|
||||
*/
|
||||
@Component
|
||||
public class V2__PerformanceIndexes implements SchemaMigration {
|
||||
|
||||
@Override
|
||||
public int getVersion() {
|
||||
return 2;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "Performance indexes: relationship indexes and additional composite indexes";
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getStatements() {
|
||||
return List.of(
|
||||
// 关系索引:加速子图查询中 WHERE r.graph_id = $graphId 的过滤
|
||||
"CREATE INDEX rel_graph_id IF NOT EXISTS FOR ()-[r:RELATED_TO]-() ON (r.graph_id)",
|
||||
|
||||
// 关系索引:加速按关系类型筛选
|
||||
"CREATE INDEX rel_relation_type IF NOT EXISTS FOR ()-[r:RELATED_TO]-() ON (r.relation_type)",
|
||||
|
||||
// 关系复合索引:加速同一图谱内按类型查询关系
|
||||
"CREATE INDEX rel_graph_id_type IF NOT EXISTS FOR ()-[r:RELATED_TO]-() ON (r.graph_id, r.relation_type)",
|
||||
|
||||
// 节点复合索引:加速 graph_id + name 过滤查询
|
||||
"CREATE INDEX entity_graph_id_name IF NOT EXISTS FOR (n:Entity) ON (n.graph_id, n.name)",
|
||||
|
||||
// 节点索引:加速增量同步中的时间范围查询
|
||||
"CREATE INDEX entity_updated_at IF NOT EXISTS FOR (n:Entity) ON (n.updated_at)"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.security;
|
||||
|
||||
import com.datamate.common.infrastructure.common.Response;
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import org.springframework.web.servlet.HandlerInterceptor;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
|
||||
|
||||
/**
|
||||
* 内部服务调用 Token 校验拦截器。
|
||||
* <p>
|
||||
* 验证 {@code X-Internal-Token} 请求头,保护 sync 端点仅供内部服务/定时任务调用。
|
||||
* <p>
|
||||
* <strong>安全策略(fail-closed)</strong>:
|
||||
* <ul>
|
||||
* <li>Token 未配置且 {@code skip-token-check=false}(默认)时,直接拒绝请求</li>
|
||||
* <li>仅当 dev/test 环境显式设置 {@code skip-token-check=true} 时,才跳过校验</li>
|
||||
* </ul>
|
||||
*/
|
||||
@Component
|
||||
@RequiredArgsConstructor
|
||||
public class InternalTokenInterceptor implements HandlerInterceptor {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(InternalTokenInterceptor.class);
|
||||
private static final String HEADER_INTERNAL_TOKEN = "X-Internal-Token";
|
||||
|
||||
private final KnowledgeGraphProperties properties;
|
||||
private final ObjectMapper objectMapper;
|
||||
|
||||
@Override
|
||||
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
|
||||
throws IOException {
|
||||
KnowledgeGraphProperties.Security security = properties.getSecurity();
|
||||
String configuredToken = security.getInternalToken();
|
||||
|
||||
if (!StringUtils.hasText(configuredToken)) {
|
||||
if (security.isSkipTokenCheck()) {
|
||||
log.warn("内部调用 Token 未配置且 skip-token-check=true,跳过校验(仅限 dev/test 环境)。");
|
||||
return true;
|
||||
}
|
||||
log.error("内部调用 Token 未配置且 skip-token-check=false(fail-closed),拒绝请求。"
|
||||
+ "请设置 KG_INTERNAL_TOKEN 环境变量或在 dev/test 环境启用 skip-token-check。");
|
||||
writeErrorResponse(response);
|
||||
return false;
|
||||
}
|
||||
|
||||
String requestToken = request.getHeader(HEADER_INTERNAL_TOKEN);
|
||||
|
||||
if (!configuredToken.equals(requestToken)) {
|
||||
writeErrorResponse(response);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private void writeErrorResponse(HttpServletResponse response) throws IOException {
|
||||
Response<?> errorBody = Response.error(KnowledgeGraphErrorCode.UNAUTHORIZED_INTERNAL_CALL);
|
||||
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
|
||||
response.setContentType(MediaType.APPLICATION_JSON_VALUE);
|
||||
response.setCharacterEncoding("UTF-8");
|
||||
response.getWriter().write(objectMapper.writeValueAsString(errorBody));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.security;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
|
||||
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
|
||||
|
||||
/**
|
||||
* 注册 {@link InternalTokenInterceptor},仅拦截 sync 端点。
|
||||
*/
|
||||
@Configuration
|
||||
@RequiredArgsConstructor
|
||||
public class InternalTokenWebMvcConfigurer implements WebMvcConfigurer {
|
||||
|
||||
private final InternalTokenInterceptor internalTokenInterceptor;
|
||||
|
||||
@Override
|
||||
public void addInterceptors(InterceptorRegistry registry) {
|
||||
registry.addInterceptor(internalTokenInterceptor)
|
||||
.addPathPatterns("/knowledge-graph/*/sync/**");
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Result of an all-paths query between two entities.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class AllPathsVO {

    /** All discovered paths, ordered by ascending path length. */
    private List<PathVO> paths;

    /** Total number of paths returned. */
    private int pathCount;
}
|
||||
@@ -0,0 +1,18 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Batch delete request carrying the IDs to remove.
 */
@Data
public class BatchDeleteRequest {

    // Validation messages are user-facing and intentionally localized (zh-CN).
    @NotEmpty(message = "ID 列表不能为空")
    @Size(max = 100, message = "单次批量删除最多 100 条")
    private List<String> ids;
}
|
||||
@@ -0,0 +1,31 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Request body for creating a graph entity.
 */
@Data
public class CreateEntityRequest {

    @NotBlank(message = "实体名称不能为空")
    private String name;

    @NotBlank(message = "实体类型不能为空")
    private String type;

    /** Optional free-text description. */
    private String description;

    /** Alternative names; defaults to an empty list so callers may omit it. */
    private List<String> aliases = new ArrayList<>();

    /** Arbitrary extra properties; defaults to an empty map. */
    private Map<String, Object> properties = new HashMap<>();

    // NOTE(review): presumably the ID of the originating dataset/knowledge base
    // (matches RelationVO.sourceId) — confirm with callers.
    private String sourceId;

    private String sourceType;

    /** Extraction confidence — presumably in [0,1]; not validated here (TODO confirm). */
    private Double confidence;
}
|
||||
@@ -0,0 +1,42 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import jakarta.validation.constraints.DecimalMax;
|
||||
import jakarta.validation.constraints.DecimalMin;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Request body for creating a relation between two existing entities.
 */
@Data
public class CreateRelationRequest {

    // UUID v4-style format shared by the REST layer for path/body IDs.
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    @NotBlank(message = "源实体ID不能为空")
    @Pattern(regexp = UUID_REGEX, message = "源实体ID格式无效")
    private String sourceEntityId;

    @NotBlank(message = "目标实体ID不能为空")
    @Pattern(regexp = UUID_REGEX, message = "目标实体ID格式无效")
    private String targetEntityId;

    @NotBlank(message = "关系类型不能为空")
    @Size(min = 1, max = 50, message = "关系类型长度必须在1-50之间")
    private String relationType;

    /** Arbitrary extra relation properties; defaults to an empty map. */
    private Map<String, Object> properties = new HashMap<>();

    @DecimalMin(value = "0.0", message = "权重必须在0.0-1.0之间")
    @DecimalMax(value = "1.0", message = "权重必须在0.0-1.0之间")
    private Double weight;

    /** ID of the originating dataset/knowledge base. */
    private String sourceId;

    @DecimalMin(value = "0.0", message = "置信度必须在0.0-1.0之间")
    @DecimalMax(value = "1.0", message = "置信度必须在0.0-1.0之间")
    private Double confidence;
}
|
||||
@@ -0,0 +1,22 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
 * Relation summary used as the edge representation in graph-traversal results.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EdgeSummaryVO {

    private String id;
    private String sourceEntityId;
    private String targetEntityId;
    private String relationType;
    private Double weight;
}
|
||||
@@ -0,0 +1,31 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
/**
 * View object for an edit-review record.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EditReviewVO {

    private String id;
    private String graphId;
    /** e.g. CREATE_ENTITY / UPDATE_RELATION — see SubmitReviewRequest. */
    private String operationType;
    private String entityId;
    private String relationId;
    /** Change payload (JSON request body captured at submit time). */
    private String payload;
    private String status;
    private String submittedBy;
    private String reviewedBy;
    private String reviewComment;
    private LocalDateTime createdAt;
    private LocalDateTime reviewedAt;
}
|
||||
@@ -0,0 +1,21 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
 * Entity summary used as the node representation in graph-traversal results.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EntitySummaryVO {

    private String id;
    private String name;
    private String type;
    private String description;
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
 * Relation edge for export, carrying the full set of relation attributes.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ExportEdgeVO {

    private String id;
    private String sourceEntityId;
    private String targetEntityId;
    private String relationType;
    private Double weight;
    private Double confidence;
    private String sourceId;
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Node for export, carrying the full set of entity attributes.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ExportNodeVO {

    private String id;
    private String name;
    private String type;
    private String description;
    /** Arbitrary extra entity properties. */
    private Map<String, Object> properties;
}
|
||||
@@ -0,0 +1,27 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Result of a shortest-path query.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class PathVO {

    /** Nodes along the path, in traversal order. */
    private List<EntitySummaryVO> nodes;

    /** Edges along the path, in traversal order. */
    private List<EdgeSummaryVO> edges;

    /** Path length in hops. */
    private int pathLength;
}
|
||||
@@ -0,0 +1,53 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * View object for relation query results.
 * <p>
 * Contains the full relation information, including summary data for the
 * source and target entities, for use in REST API responses.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class RelationVO {

    private String id;

    private String sourceEntityId;

    private String sourceEntityName;

    private String sourceEntityType;

    private String targetEntityId;

    private String targetEntityName;

    private String targetEntityType;

    private String relationType;

    // @Builder.Default keeps the empty map even when built via the builder.
    @Builder.Default
    private Map<String, Object> properties = new HashMap<>();

    private Double weight;

    private Double confidence;

    /** ID of the originating dataset/knowledge base. */
    private String sourceId;

    private String graphId;

    private LocalDateTime createdAt;
}
|
||||
@@ -0,0 +1,13 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
/**
 * Request body for approving or rejecting a review.
 */
@Data
public class ReviewActionRequest {

    /** Reviewer comment (optional). */
    private String comment;
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
 * Full-text search hit, including the relevance score.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SearchHitVO {

    private String id;
    private String name;
    private String type;
    private String description;

    /** Full-text relevance score (higher is more relevant). */
    private double score;
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Result of a subgraph export.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SubgraphExportVO {

    /** Nodes in the subgraph, with full attributes. */
    private List<ExportNodeVO> nodes;

    /** Edges in the subgraph. */
    private List<ExportEdgeVO> edges;

    /** Number of nodes. */
    private int nodeCount;

    /** Number of edges. */
    private int edgeCount;
}
|
||||
@@ -0,0 +1,26 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Subgraph query request.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class SubgraphRequest {

    // UUID v4-style format shared by the REST layer for entity IDs.
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    // Each element is validated against the UUID pattern; the list itself is
    // bounded to cap query cost.
    @NotEmpty(message = "实体 ID 列表不能为空")
    @Size(max = 500, message = "实体数量超出限制(最大 500)")
    private List<@Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String> entityIds;
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Result of a subgraph query.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SubgraphVO {

    /** Nodes in the subgraph. */
    private List<EntitySummaryVO> nodes;

    /** Edges in the subgraph. */
    private List<EdgeSummaryVO> edges;

    /** Number of nodes. */
    private int nodeCount;

    /** Number of edges. */
    private int edgeCount;
}
|
||||
@@ -0,0 +1,65 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import jakarta.validation.constraints.AssertTrue;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* 提交编辑审核请求。
|
||||
*/
|
||||
@Data
|
||||
public class SubmitReviewRequest {
|
||||
|
||||
private static final String UUID_REGEX =
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";
|
||||
|
||||
/**
|
||||
* 操作类型:CREATE_ENTITY, UPDATE_ENTITY, DELETE_ENTITY,
|
||||
* CREATE_RELATION, UPDATE_RELATION, DELETE_RELATION,
|
||||
* BATCH_DELETE_ENTITY, BATCH_DELETE_RELATION
|
||||
*/
|
||||
@NotBlank(message = "操作类型不能为空")
|
||||
@Pattern(regexp = "^(CREATE|UPDATE|DELETE|BATCH_DELETE)_(ENTITY|RELATION)$",
|
||||
message = "操作类型无效")
|
||||
private String operationType;
|
||||
|
||||
/** 目标实体 ID(实体操作时必填) */
|
||||
private String entityId;
|
||||
|
||||
/** 目标关系 ID(关系操作时必填) */
|
||||
private String relationId;
|
||||
|
||||
/** 变更载荷(JSON 格式的请求体) */
|
||||
private String payload;
|
||||
|
||||
@AssertTrue(message = "UPDATE/DELETE 实体操作必须提供 entityId")
|
||||
private boolean isEntityIdValid() {
|
||||
if (operationType == null) return true;
|
||||
if (operationType.endsWith("_ENTITY") && !operationType.startsWith("CREATE")
|
||||
&& !operationType.startsWith("BATCH")) {
|
||||
return entityId != null && !entityId.isBlank();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@AssertTrue(message = "UPDATE/DELETE 关系操作必须提供 relationId")
|
||||
private boolean isRelationIdValid() {
|
||||
if (operationType == null) return true;
|
||||
if (operationType.endsWith("_RELATION") && !operationType.startsWith("CREATE")
|
||||
&& !operationType.startsWith("BATCH")) {
|
||||
return relationId != null && !relationId.isBlank();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@AssertTrue(message = "CREATE/UPDATE/BATCH_DELETE 操作必须提供 payload")
|
||||
private boolean isPayloadValid() {
|
||||
if (operationType == null) return true;
|
||||
if (operationType.startsWith("CREATE") || operationType.startsWith("UPDATE")
|
||||
|| operationType.startsWith("BATCH_DELETE")) {
|
||||
return payload != null && !payload.isBlank();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.SyncMetadata;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 同步元数据视图对象。
|
||||
* <p>
|
||||
* 包含本次同步的整体统计信息和各步骤的详细结果。
|
||||
*/
|
||||
@Data
|
||||
@Builder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class SyncMetadataVO {
|
||||
|
||||
private String syncId;
|
||||
private String graphId;
|
||||
private String syncType;
|
||||
private String status;
|
||||
private LocalDateTime startedAt;
|
||||
private LocalDateTime completedAt;
|
||||
private long durationMillis;
|
||||
private int totalCreated;
|
||||
private int totalUpdated;
|
||||
private int totalSkipped;
|
||||
private int totalFailed;
|
||||
private int totalPurged;
|
||||
private int totalEntities;
|
||||
private LocalDateTime updatedFrom;
|
||||
private LocalDateTime updatedTo;
|
||||
private String errorMessage;
|
||||
private List<String> stepSummaries;
|
||||
/** 各步骤的详细结果(仅当前同步返回时携带,历史查询时为 null) */
|
||||
private List<SyncResultVO> results;
|
||||
|
||||
/**
|
||||
* 从 SyncMetadata 转换(包含详细步骤结果)。
|
||||
*/
|
||||
public static SyncMetadataVO from(SyncMetadata metadata) {
|
||||
List<SyncResultVO> resultVOs = null;
|
||||
if (metadata.getResults() != null) {
|
||||
resultVOs = metadata.getResults().stream()
|
||||
.map(SyncResultVO::from)
|
||||
.toList();
|
||||
}
|
||||
|
||||
return SyncMetadataVO.builder()
|
||||
.syncId(metadata.getSyncId())
|
||||
.graphId(metadata.getGraphId())
|
||||
.syncType(metadata.getSyncType())
|
||||
.status(metadata.getStatus())
|
||||
.startedAt(metadata.getStartedAt())
|
||||
.completedAt(metadata.getCompletedAt())
|
||||
.durationMillis(metadata.getDurationMillis())
|
||||
.totalCreated(metadata.getTotalCreated())
|
||||
.totalUpdated(metadata.getTotalUpdated())
|
||||
.totalSkipped(metadata.getTotalSkipped())
|
||||
.totalFailed(metadata.getTotalFailed())
|
||||
.totalPurged(metadata.getTotalPurged())
|
||||
.totalEntities(metadata.totalEntities())
|
||||
.updatedFrom(metadata.getUpdatedFrom())
|
||||
.updatedTo(metadata.getUpdatedTo())
|
||||
.errorMessage(metadata.getErrorMessage())
|
||||
.stepSummaries(metadata.getStepSummaries())
|
||||
.results(resultVOs)
|
||||
.build();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,56 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.SyncResult;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
/**
|
||||
* 同步结果视图对象。
|
||||
* <p>
|
||||
* 不暴露内部错误详情(errors 列表),仅返回错误计数和 syncId,
|
||||
* 前端可通过 syncId 向运维查询具体日志。
|
||||
*/
|
||||
@Data
|
||||
@Builder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class SyncResultVO {
|
||||
|
||||
private String syncId;
|
||||
private String syncType;
|
||||
private int created;
|
||||
private int updated;
|
||||
private int skipped;
|
||||
private int failed;
|
||||
private int purged;
|
||||
private int total;
|
||||
private long durationMillis;
|
||||
/** 标记为占位符的步骤(功能尚未实现) */
|
||||
private boolean placeholder;
|
||||
/** 错误数量(不暴露具体错误信息) */
|
||||
private int errorCount;
|
||||
private LocalDateTime startedAt;
|
||||
private LocalDateTime completedAt;
|
||||
|
||||
public static SyncResultVO from(SyncResult result) {
|
||||
return SyncResultVO.builder()
|
||||
.syncId(result.getSyncId())
|
||||
.syncType(result.getSyncType())
|
||||
.created(result.getCreated())
|
||||
.updated(result.getUpdated())
|
||||
.skipped(result.getSkipped())
|
||||
.failed(result.getFailed())
|
||||
.purged(result.getPurged())
|
||||
.total(result.total())
|
||||
.durationMillis(result.durationMillis())
|
||||
.placeholder(result.isPlaceholder())
|
||||
.errorCount(result.getErrors() != null ? result.getErrors().size() : 0)
|
||||
.startedAt(result.getStartedAt())
|
||||
.completedAt(result.getCompletedAt())
|
||||
.build();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Entity update request. All fields are optional; only provided values are
 * applied — NOTE(review): patch semantics presumed from the matching
 * UpdateRelationRequest — confirm in GraphEntityService.
 */
@Data
public class UpdateEntityRequest {

    private String name;

    private String description;

    private List<String> aliases;

    private Map<String, Object> properties;

    private Double confidence;
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import jakarta.validation.constraints.DecimalMax;
|
||||
import jakarta.validation.constraints.DecimalMin;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Relation update request.
 * <p>
 * All fields are optional; only fields with values are updated (patch semantics).
 */
@Data
public class UpdateRelationRequest {

    @Size(min = 1, max = 50, message = "关系类型长度必须在1-50之间")
    private String relationType;

    private Map<String, Object> properties;

    @DecimalMin(value = "0.0", message = "权重必须在0.0-1.0之间")
    @DecimalMax(value = "1.0", message = "权重必须在0.0-1.0之间")
    private Double weight;

    @DecimalMin(value = "0.0", message = "置信度必须在0.0-1.0之间")
    @DecimalMax(value = "1.0", message = "置信度必须在0.0-1.0之间")
    private Double confidence;
}
|
||||
@@ -0,0 +1,71 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;
|
||||
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.application.EditReviewService;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.EditReviewVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.ReviewActionRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SubmitReviewRequest;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
/**
 * REST endpoints for the knowledge-graph edit-review workflow:
 * submit, approve, reject, and list reviews for a graph.
 */
@RestController
@RequestMapping("/knowledge-graph/{graphId}/review")
@RequiredArgsConstructor
@Validated
public class EditReviewController {

    // Path variables are UUIDs; validated eagerly so malformed IDs fail fast.
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    private final EditReviewService reviewService;

    /**
     * Submits an edit for review. Returns 201 with the created review record.
     * The submitter is taken from the X-User-Id header ("anonymous" if absent).
     */
    @PostMapping("/submit")
    @ResponseStatus(HttpStatus.CREATED)
    public EditReviewVO submitReview(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @Valid @RequestBody SubmitReviewRequest request,
            @RequestHeader(value = "X-User-Id", defaultValue = "anonymous") String userId) {
        return reviewService.submitReview(graphId, request, userId);
    }

    /**
     * Approves a pending review. The request body (reviewer comment) is optional.
     */
    @PostMapping("/{reviewId}/approve")
    public EditReviewVO approveReview(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "reviewId 格式无效") String reviewId,
            @RequestBody(required = false) ReviewActionRequest request,
            @RequestHeader(value = "X-User-Id", defaultValue = "anonymous") String userId) {
        String comment = (request != null) ? request.getComment() : null;
        return reviewService.approveReview(graphId, reviewId, userId, comment);
    }

    /**
     * Rejects a pending review. The request body (reviewer comment) is optional.
     */
    @PostMapping("/{reviewId}/reject")
    public EditReviewVO rejectReview(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "reviewId 格式无效") String reviewId,
            @RequestBody(required = false) ReviewActionRequest request,
            @RequestHeader(value = "X-User-Id", defaultValue = "anonymous") String userId) {
        String comment = (request != null) ? request.getComment() : null;
        return reviewService.rejectReview(graphId, reviewId, userId, comment);
    }

    /**
     * Lists pending reviews for the graph, paginated.
     */
    @GetMapping("/pending")
    public PagedResponse<EditReviewVO> listPendingReviews(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam(defaultValue = "0") int page,
            @RequestParam(defaultValue = "20") int size) {
        return reviewService.listPendingReviews(graphId, page, size);
    }

    /**
     * Lists reviews for the graph, optionally filtered by status, paginated.
     */
    @GetMapping
    public PagedResponse<EditReviewVO> listReviews(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam(required = false) String status,
            @RequestParam(defaultValue = "0") int page,
            @RequestParam(defaultValue = "20") int size) {
        return reviewService.listReviews(graphId, status, page, size);
    }
}
|
||||
@@ -0,0 +1,123 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;
|
||||
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.application.GraphEntityService;
|
||||
import com.datamate.knowledgegraph.application.GraphRelationService;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.CreateEntityRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.RelationVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.UpdateEntityRequest;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/knowledge-graph/{graphId}/entities")
|
||||
@RequiredArgsConstructor
|
||||
@Validated
|
||||
public class GraphEntityController {
|
||||
|
||||
private static final String UUID_REGEX =
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";
|
||||
|
||||
private final GraphEntityService entityService;
|
||||
private final GraphRelationService relationService;
|
||||
|
||||
@PostMapping
|
||||
@ResponseStatus(HttpStatus.CREATED)
|
||||
public GraphEntity createEntity(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@Valid @RequestBody CreateEntityRequest request) {
|
||||
return entityService.createEntity(graphId, request);
|
||||
}
|
||||
|
||||
@GetMapping("/{entityId}")
|
||||
public GraphEntity getEntity(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId) {
|
||||
return entityService.getEntity(graphId, entityId);
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询实体列表(非分页,向后兼容)。
|
||||
* <p>
|
||||
* 当请求不包含 {@code page} 参数时匹配此端点,返回 {@code List}。
|
||||
* 需要分页时请传入 {@code page} 参数,将路由到分页端点。
|
||||
*/
|
||||
@GetMapping(params = "!page")
|
||||
public List<GraphEntity> listEntities(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam(required = false) String type,
|
||||
@RequestParam(required = false) String keyword) {
|
||||
if (keyword != null && !keyword.isBlank()) {
|
||||
return entityService.searchEntities(graphId, keyword);
|
||||
}
|
||||
if (type != null && !type.isBlank()) {
|
||||
return entityService.listEntitiesByType(graphId, type);
|
||||
}
|
||||
return entityService.listEntities(graphId);
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询实体列表(分页)。
|
||||
* <p>
|
||||
* 当请求包含 {@code page} 参数时匹配此端点,返回 {@code PagedResponse}。
|
||||
*/
|
||||
@GetMapping(params = "page")
|
||||
public PagedResponse<GraphEntity> listEntitiesPaged(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam(required = false) String type,
|
||||
@RequestParam(required = false) String keyword,
|
||||
@RequestParam(defaultValue = "0") int page,
|
||||
@RequestParam(defaultValue = "20") int size) {
|
||||
if (keyword != null && !keyword.isBlank()) {
|
||||
return entityService.searchEntitiesPaged(graphId, keyword, page, size);
|
||||
}
|
||||
if (type != null && !type.isBlank()) {
|
||||
return entityService.listEntitiesByTypePaged(graphId, type, page, size);
|
||||
}
|
||||
return entityService.listEntitiesPaged(graphId, page, size);
|
||||
}
|
||||
|
||||
@PutMapping("/{entityId}")
|
||||
public GraphEntity updateEntity(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId,
|
||||
@Valid @RequestBody UpdateEntityRequest request) {
|
||||
return entityService.updateEntity(graphId, entityId, request);
|
||||
}
|
||||
|
||||
@DeleteMapping("/{entityId}")
|
||||
@ResponseStatus(HttpStatus.NO_CONTENT)
|
||||
public void deleteEntity(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId) {
|
||||
entityService.deleteEntity(graphId, entityId);
|
||||
}
|
||||
|
||||
@GetMapping("/{entityId}/relations")
|
||||
public PagedResponse<RelationVO> listEntityRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId,
|
||||
@RequestParam(defaultValue = "all") @Pattern(regexp = "^(all|in|out)$", message = "direction 参数无效,允许值:all, in, out") String direction,
|
||||
@RequestParam(required = false) String type,
|
||||
@RequestParam(defaultValue = "0") int page,
|
||||
@RequestParam(defaultValue = "20") int size) {
|
||||
return relationService.listEntityRelations(graphId, entityId, direction, type, page, size);
|
||||
}
|
||||
|
||||
@GetMapping("/{entityId}/neighbors")
|
||||
public List<GraphEntity> getNeighbors(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId,
|
||||
@RequestParam(defaultValue = "2") int depth,
|
||||
@RequestParam(defaultValue = "50") int limit) {
|
||||
return entityService.getNeighbors(graphId, entityId, depth, limit);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,126 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;
|
||||
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.application.GraphQueryService;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.*;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.http.MediaType;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
/**
|
||||
* 知识图谱查询接口。
|
||||
* <p>
|
||||
* 提供图遍历(邻居、最短路径、所有路径、子图、子图导出)和全文搜索功能。
|
||||
*/
|
||||
@RestController
|
||||
@RequestMapping("/knowledge-graph/{graphId}/query")
|
||||
@RequiredArgsConstructor
|
||||
@Validated
|
||||
public class GraphQueryController {
|
||||
|
||||
private static final String UUID_REGEX =
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";
|
||||
|
||||
private final GraphQueryService queryService;
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 图遍历
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 查询实体的 N 跳邻居子图。
|
||||
*/
|
||||
@GetMapping("/neighbors/{entityId}")
|
||||
public SubgraphVO getNeighborGraph(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId,
|
||||
@RequestParam(defaultValue = "2") int depth,
|
||||
@RequestParam(defaultValue = "50") int limit) {
|
||||
return queryService.getNeighborGraph(graphId, entityId, depth, limit);
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询两个实体之间的最短路径。
|
||||
*/
|
||||
@GetMapping("/shortest-path")
|
||||
public PathVO getShortestPath(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam @Pattern(regexp = UUID_REGEX, message = "sourceId 格式无效") String sourceId,
|
||||
@RequestParam @Pattern(regexp = UUID_REGEX, message = "targetId 格式无效") String targetId,
|
||||
@RequestParam(defaultValue = "3") int maxDepth) {
|
||||
return queryService.getShortestPath(graphId, sourceId, targetId, maxDepth);
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询两个实体之间的所有路径。
|
||||
* <p>
|
||||
* 返回按路径长度升序排列的所有路径,支持最大深度和最大路径数限制。
|
||||
*/
|
||||
@GetMapping("/all-paths")
|
||||
public AllPathsVO findAllPaths(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam @Pattern(regexp = UUID_REGEX, message = "sourceId 格式无效") String sourceId,
|
||||
@RequestParam @Pattern(regexp = UUID_REGEX, message = "targetId 格式无效") String targetId,
|
||||
@RequestParam(defaultValue = "3") int maxDepth,
|
||||
@RequestParam(defaultValue = "10") int maxPaths) {
|
||||
return queryService.findAllPaths(graphId, sourceId, targetId, maxDepth, maxPaths);
|
||||
}
|
||||
|
||||
/**
|
||||
* 提取指定实体集合的子图(关系网络)。
|
||||
*/
|
||||
@PostMapping("/subgraph")
|
||||
public SubgraphVO getSubgraph(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@Valid @RequestBody SubgraphRequest request) {
|
||||
return queryService.getSubgraph(graphId, request.getEntityIds());
|
||||
}
|
||||
|
||||
/**
|
||||
* 导出指定实体集合的子图。
|
||||
* <p>
|
||||
* 支持深度扩展和多种输出格式(JSON、GraphML)。
|
||||
*
|
||||
* @param format 输出格式:json(默认)或 graphml
|
||||
* @param depth 扩展深度(0=仅指定实体,1=含 1 跳邻居)
|
||||
*/
|
||||
@PostMapping("/subgraph/export")
|
||||
public ResponseEntity<?> exportSubgraph(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@Valid @RequestBody SubgraphRequest request,
|
||||
@RequestParam(defaultValue = "json") String format,
|
||||
@RequestParam(defaultValue = "0") int depth) {
|
||||
SubgraphExportVO exportVO = queryService.exportSubgraph(graphId, request.getEntityIds(), depth);
|
||||
|
||||
if ("graphml".equalsIgnoreCase(format)) {
|
||||
String graphml = queryService.convertToGraphML(exportVO);
|
||||
return ResponseEntity.ok()
|
||||
.contentType(MediaType.APPLICATION_XML)
|
||||
.body(graphml);
|
||||
}
|
||||
|
||||
return ResponseEntity.ok(exportVO);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 全文搜索
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 基于全文索引搜索实体。
|
||||
* <p>
|
||||
* 搜索 name 和 description 字段,按相关度排序。
|
||||
*/
|
||||
@GetMapping("/search")
|
||||
public PagedResponse<SearchHitVO> fulltextSearch(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam String q,
|
||||
@RequestParam(defaultValue = "0") int page,
|
||||
@RequestParam(defaultValue = "20") int size) {
|
||||
return queryService.fulltextSearch(graphId, q, page, size);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,66 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;
|
||||
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.application.GraphRelationService;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.CreateRelationRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.RelationVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.UpdateRelationRequest;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/knowledge-graph/{graphId}/relations")
|
||||
@RequiredArgsConstructor
|
||||
@Validated
|
||||
public class GraphRelationController {
|
||||
|
||||
private static final String UUID_REGEX =
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";
|
||||
|
||||
private final GraphRelationService relationService;
|
||||
|
||||
@PostMapping
|
||||
@ResponseStatus(HttpStatus.CREATED)
|
||||
public RelationVO createRelation(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@Valid @RequestBody CreateRelationRequest request) {
|
||||
return relationService.createRelation(graphId, request);
|
||||
}
|
||||
|
||||
@GetMapping
|
||||
public PagedResponse<RelationVO> listRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam(required = false) String type,
|
||||
@RequestParam(defaultValue = "0") int page,
|
||||
@RequestParam(defaultValue = "20") int size) {
|
||||
return relationService.listRelations(graphId, type, page, size);
|
||||
}
|
||||
|
||||
@GetMapping("/{relationId}")
|
||||
public RelationVO getRelation(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "relationId 格式无效") String relationId) {
|
||||
return relationService.getRelation(graphId, relationId);
|
||||
}
|
||||
|
||||
@PutMapping("/{relationId}")
|
||||
public RelationVO updateRelation(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "relationId 格式无效") String relationId,
|
||||
@Valid @RequestBody UpdateRelationRequest request) {
|
||||
return relationService.updateRelation(graphId, relationId, request);
|
||||
}
|
||||
|
||||
@DeleteMapping("/{relationId}")
|
||||
@ResponseStatus(HttpStatus.NO_CONTENT)
|
||||
public void deleteRelation(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "relationId 格式无效") String relationId) {
|
||||
relationService.deleteRelation(graphId, relationId);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,282 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;
|
||||
|
||||
import com.datamate.knowledgegraph.application.GraphSyncService;
|
||||
import com.datamate.knowledgegraph.domain.model.SyncMetadata;
|
||||
import com.datamate.knowledgegraph.domain.model.SyncResult;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SyncMetadataVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SyncResultVO;
|
||||
import jakarta.validation.constraints.Max;
|
||||
import jakarta.validation.constraints.Min;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.format.annotation.DateTimeFormat;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 知识图谱数据同步 API。
|
||||
* <p>
|
||||
* 提供手动触发 MySQL → Neo4j 同步的 REST 端点。
|
||||
* 生产环境中也可通过定时任务自动触发。
|
||||
* <p>
|
||||
* <b>安全架构</b>:
|
||||
* <ul>
|
||||
* <li>外部请求 → API Gateway (JWT 校验) → X-User-* headers → 后端服务</li>
|
||||
* <li>内部调用 → X-Internal-Token header → {@code InternalTokenInterceptor} 校验 → sync 端点</li>
|
||||
* </ul>
|
||||
* Token 校验由 {@code InternalTokenInterceptor} 拦截器统一实现,
|
||||
* 对 {@code /knowledge-graph/{graphId}/sync/} 路径前缀自动生效。
|
||||
*/
|
||||
@RestController
|
||||
@RequestMapping("/knowledge-graph/{graphId}/sync")
|
||||
@RequiredArgsConstructor
|
||||
@Validated
|
||||
public class GraphSyncController {
|
||||
|
||||
private static final String UUID_REGEX =
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";
|
||||
|
||||
private final GraphSyncService syncService;
|
||||
|
||||
/**
|
||||
* 全量同步:拉取所有实体并构建关系。
|
||||
*/
|
||||
@PostMapping("/full")
|
||||
public SyncMetadataVO syncAll(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
SyncMetadata metadata = syncService.syncAll(graphId);
|
||||
return SyncMetadataVO.from(metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* 增量同步:仅拉取指定时间窗口内变更的数据并同步。
|
||||
*/
|
||||
@PostMapping("/incremental")
|
||||
public SyncMetadataVO syncIncremental(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime updatedFrom,
|
||||
@RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime updatedTo) {
|
||||
SyncMetadata metadata = syncService.syncIncremental(graphId, updatedFrom, updatedTo);
|
||||
return SyncMetadataVO.from(metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步数据集实体。
|
||||
*/
|
||||
@PostMapping("/datasets")
|
||||
public SyncResultVO syncDatasets(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncDatasets(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步字段实体。
|
||||
*/
|
||||
@PostMapping("/fields")
|
||||
public SyncResultVO syncFields(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncFields(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步用户实体。
|
||||
*/
|
||||
@PostMapping("/users")
|
||||
public SyncResultVO syncUsers(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncUsers(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步组织实体。
|
||||
*/
|
||||
@PostMapping("/orgs")
|
||||
public SyncResultVO syncOrgs(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncOrgs(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 HAS_FIELD 关系。
|
||||
*/
|
||||
@PostMapping("/relations/has-field")
|
||||
public SyncResultVO buildHasFieldRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildHasFieldRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 DERIVED_FROM 关系。
|
||||
*/
|
||||
@PostMapping("/relations/derived-from")
|
||||
public SyncResultVO buildDerivedFromRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildDerivedFromRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 BELONGS_TO 关系。
|
||||
*/
|
||||
@PostMapping("/relations/belongs-to")
|
||||
public SyncResultVO buildBelongsToRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildBelongsToRelations(graphId));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 新增实体同步端点
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 同步工作流实体。
|
||||
*/
|
||||
@PostMapping("/workflows")
|
||||
public SyncResultVO syncWorkflows(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncWorkflows(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步作业实体。
|
||||
*/
|
||||
@PostMapping("/jobs")
|
||||
public SyncResultVO syncJobs(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncJobs(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步标注任务实体。
|
||||
*/
|
||||
@PostMapping("/label-tasks")
|
||||
public SyncResultVO syncLabelTasks(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncLabelTasks(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步知识集实体。
|
||||
*/
|
||||
@PostMapping("/knowledge-sets")
|
||||
public SyncResultVO syncKnowledgeSets(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncKnowledgeSets(graphId));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 新增关系构建端点
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 构建 USES_DATASET 关系。
|
||||
*/
|
||||
@PostMapping("/relations/uses-dataset")
|
||||
public SyncResultVO buildUsesDatasetRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildUsesDatasetRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 PRODUCES 关系。
|
||||
*/
|
||||
@PostMapping("/relations/produces")
|
||||
public SyncResultVO buildProducesRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildProducesRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 ASSIGNED_TO 关系。
|
||||
*/
|
||||
@PostMapping("/relations/assigned-to")
|
||||
public SyncResultVO buildAssignedToRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildAssignedToRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 TRIGGERS 关系。
|
||||
*/
|
||||
@PostMapping("/relations/triggers")
|
||||
public SyncResultVO buildTriggersRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildTriggersRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 DEPENDS_ON 关系。
|
||||
*/
|
||||
@PostMapping("/relations/depends-on")
|
||||
public SyncResultVO buildDependsOnRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildDependsOnRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 IMPACTS 关系。
|
||||
*/
|
||||
@PostMapping("/relations/impacts")
|
||||
public SyncResultVO buildImpactsRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildImpactsRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 SOURCED_FROM 关系。
|
||||
*/
|
||||
@PostMapping("/relations/sourced-from")
|
||||
public SyncResultVO buildSourcedFromRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildSourcedFromRelations(graphId));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 同步历史查询端点
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 查询同步历史记录。
|
||||
*
|
||||
* @param status 可选,按状态过滤(SUCCESS / FAILED / PARTIAL)
|
||||
* @param limit 返回条数上限,默认 20
|
||||
*/
|
||||
@GetMapping("/history")
|
||||
public List<SyncMetadataVO> getSyncHistory(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam(required = false) String status,
|
||||
@RequestParam(defaultValue = "20") @Min(1) @Max(200) int limit) {
|
||||
List<SyncMetadata> history = syncService.getSyncHistory(graphId, status, limit);
|
||||
return history.stream().map(SyncMetadataVO::from).toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* 按时间范围查询同步历史。
|
||||
*/
|
||||
@GetMapping("/history/range")
|
||||
public List<SyncMetadataVO> getSyncHistoryByTimeRange(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime from,
|
||||
@RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime to,
|
||||
@RequestParam(defaultValue = "0") @Min(0) @Max(10000) int page,
|
||||
@RequestParam(defaultValue = "20") @Min(1) @Max(200) int size) {
|
||||
List<SyncMetadata> history = syncService.getSyncHistoryByTimeRange(graphId, from, to, page, size);
|
||||
return history.stream().map(SyncMetadataVO::from).toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* 根据 syncId 查询单条同步记录。
|
||||
*/
|
||||
@GetMapping("/history/{syncId}")
|
||||
public ResponseEntity<SyncMetadataVO> getSyncRecord(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable String syncId) {
|
||||
return syncService.getSyncRecord(graphId, syncId)
|
||||
.map(SyncMetadataVO::from)
|
||||
.map(ResponseEntity::ok)
|
||||
.orElse(ResponseEntity.notFound().build());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,76 @@
|
||||
# 知识图谱服务 - Neo4j连接配置
|
||||
# 该配置在 main-application 的 spring.config.import 中引入
|
||||
# 注意:生产环境务必通过环境变量 NEO4J_PASSWORD 设置密码,不要使用默认值
|
||||
|
||||
spring:
|
||||
data:
|
||||
redis:
|
||||
host: ${REDIS_HOST:datamate-redis}
|
||||
port: ${REDIS_PORT:6379}
|
||||
password: ${REDIS_PASSWORD:}
|
||||
timeout: ${REDIS_TIMEOUT:3000}
|
||||
|
||||
neo4j:
|
||||
uri: ${NEO4J_URI:bolt://datamate-neo4j:7687}
|
||||
authentication:
|
||||
username: ${NEO4J_USERNAME:neo4j}
|
||||
password: ${NEO4J_PASSWORD:datamate123}
|
||||
pool:
|
||||
max-connection-pool-size: ${NEO4J_POOL_MAX_SIZE:50}
|
||||
connection-acquisition-timeout: 30s
|
||||
max-connection-lifetime: 1h
|
||||
log-leaked-sessions: true
|
||||
|
||||
# 知识图谱服务配置
|
||||
datamate:
|
||||
knowledge-graph:
|
||||
# 默认查询跳数限制
|
||||
max-depth: ${KG_MAX_DEPTH:3}
|
||||
# 子图返回最大节点数
|
||||
max-nodes-per-query: ${KG_MAX_NODES:500}
|
||||
# 批量导入批次大小
|
||||
import-batch-size: ${KG_IMPORT_BATCH_SIZE:100}
|
||||
# 安全配置
|
||||
security:
|
||||
# 内部服务调用 Token(用于 sync 端点的 X-Internal-Token 校验)
|
||||
# 生产环境务必通过 KG_INTERNAL_TOKEN 环境变量设置,否则 sync 端点将拒绝所有请求(fail-closed)
|
||||
internal-token: ${KG_INTERNAL_TOKEN:}
|
||||
# 是否跳过 Token 校验(默认 false = fail-closed)
|
||||
# 仅在 dev/test 环境显式设置为 true 以跳过校验
|
||||
skip-token-check: ${KG_SKIP_TOKEN_CHECK:false}
|
||||
# Schema 迁移配置
|
||||
migration:
|
||||
# 是否启用 Schema 版本化迁移
|
||||
enabled: ${KG_MIGRATION_ENABLED:true}
|
||||
# 是否校验已应用迁移的 checksum(防止迁移被篡改)
|
||||
validate-checksums: ${KG_MIGRATION_VALIDATE_CHECKSUMS:true}
|
||||
# MySQL → Neo4j 同步配置
|
||||
sync:
|
||||
# 数据管理服务地址
|
||||
data-management-url: ${DATA_MANAGEMENT_URL:http://localhost:8080/api}
|
||||
# 标注服务地址
|
||||
annotation-service-url: ${ANNOTATION_SERVICE_URL:http://localhost:8080/api}
|
||||
# 每页拉取数量
|
||||
page-size: ${KG_SYNC_PAGE_SIZE:200}
|
||||
# HTTP 连接超时(毫秒)
|
||||
connect-timeout: ${KG_SYNC_CONNECT_TIMEOUT:5000}
|
||||
# HTTP 读取超时(毫秒)
|
||||
read-timeout: ${KG_SYNC_READ_TIMEOUT:30000}
|
||||
# 失败时最大重试次数
|
||||
max-retries: ${KG_SYNC_MAX_RETRIES:3}
|
||||
# 重试间隔(毫秒)
|
||||
retry-interval: ${KG_SYNC_RETRY_INTERVAL:1000}
|
||||
# 是否在启动时自动初始化 Schema
|
||||
auto-init-schema: ${KG_AUTO_INIT_SCHEMA:true}
|
||||
# 是否允许空快照触发 purge(默认 false,防止上游返回空列表时误删全部同步实体)
|
||||
allow-purge-on-empty-snapshot: ${KG_ALLOW_PURGE_ON_EMPTY_SNAPSHOT:false}
|
||||
# 缓存配置
|
||||
cache:
|
||||
# 是否启用 Redis 缓存
|
||||
enabled: ${KG_CACHE_ENABLED:true}
|
||||
# 实体缓存 TTL(秒)
|
||||
entity-ttl-seconds: ${KG_CACHE_ENTITY_TTL:3600}
|
||||
# 查询结果缓存 TTL(秒)
|
||||
query-ttl-seconds: ${KG_CACHE_QUERY_TTL:300}
|
||||
# 全文搜索缓存 TTL(秒)
|
||||
search-ttl-seconds: ${KG_CACHE_SEARCH_TTL:180}
|
||||
@@ -0,0 +1,361 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.domain.model.EditReview;
|
||||
import com.datamate.knowledgegraph.domain.repository.EditReviewRepository;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.EditReviewVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SubmitReviewRequest;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class EditReviewServiceTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String REVIEW_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
private static final String ENTITY_ID = "770e8400-e29b-41d4-a716-446655440002";
|
||||
private static final String USER_ID = "user-1";
|
||||
private static final String REVIEWER_ID = "reviewer-1";
|
||||
private static final String INVALID_GRAPH_ID = "not-a-uuid";
|
||||
|
||||
@Mock
|
||||
private EditReviewRepository reviewRepository;
|
||||
|
||||
@Mock
|
||||
private GraphEntityService entityService;
|
||||
|
||||
@Mock
|
||||
private GraphRelationService relationService;
|
||||
|
||||
@InjectMocks
|
||||
private EditReviewService reviewService;
|
||||
|
||||
private EditReview pendingReview;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
pendingReview = EditReview.builder()
|
||||
.id(REVIEW_ID)
|
||||
.graphId(GRAPH_ID)
|
||||
.operationType("CREATE_ENTITY")
|
||||
.payload("{\"name\":\"TestEntity\",\"type\":\"Dataset\"}")
|
||||
.status("PENDING")
|
||||
.submittedBy(USER_ID)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.build();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// graphId 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void submitReview_invalidGraphId_throwsBusinessException() {
|
||||
SubmitReviewRequest request = new SubmitReviewRequest();
|
||||
request.setOperationType("CREATE_ENTITY");
|
||||
request.setPayload("{}");
|
||||
|
||||
assertThatThrownBy(() -> reviewService.submitReview(INVALID_GRAPH_ID, request, USER_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> reviewService.approveReview(INVALID_GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// submitReview
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void submitReview_success() {
|
||||
SubmitReviewRequest request = new SubmitReviewRequest();
|
||||
request.setOperationType("CREATE_ENTITY");
|
||||
request.setPayload("{\"name\":\"NewEntity\",\"type\":\"Dataset\"}");
|
||||
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
EditReviewVO result = reviewService.submitReview(GRAPH_ID, request, USER_ID);
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getStatus()).isEqualTo("PENDING");
|
||||
assertThat(result.getOperationType()).isEqualTo("CREATE_ENTITY");
|
||||
verify(reviewRepository).save(any(EditReview.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void submitReview_withEntityId() {
|
||||
SubmitReviewRequest request = new SubmitReviewRequest();
|
||||
request.setOperationType("UPDATE_ENTITY");
|
||||
request.setEntityId(ENTITY_ID);
|
||||
request.setPayload("{\"name\":\"Updated\"}");
|
||||
|
||||
EditReview savedReview = EditReview.builder()
|
||||
.id(REVIEW_ID)
|
||||
.graphId(GRAPH_ID)
|
||||
.operationType("UPDATE_ENTITY")
|
||||
.entityId(ENTITY_ID)
|
||||
.payload("{\"name\":\"Updated\"}")
|
||||
.status("PENDING")
|
||||
.submittedBy(USER_ID)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.build();
|
||||
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(savedReview);
|
||||
|
||||
EditReviewVO result = reviewService.submitReview(GRAPH_ID, request, USER_ID);
|
||||
|
||||
assertThat(result.getEntityId()).isEqualTo(ENTITY_ID);
|
||||
assertThat(result.getOperationType()).isEqualTo("UPDATE_ENTITY");
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// approveReview
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void approveReview_success_appliesChange() {
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
EditReviewVO result = reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, "LGTM");
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(pendingReview.getStatus()).isEqualTo("APPROVED");
|
||||
assertThat(pendingReview.getReviewedBy()).isEqualTo(REVIEWER_ID);
|
||||
assertThat(pendingReview.getReviewComment()).isEqualTo("LGTM");
|
||||
assertThat(pendingReview.getReviewedAt()).isNotNull();
|
||||
|
||||
// Verify applyChange was called (createEntity for CREATE_ENTITY)
|
||||
verify(entityService).createEntity(eq(GRAPH_ID), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_notFound_throwsBusinessException() {
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_alreadyProcessed_throwsBusinessException() {
|
||||
pendingReview.setStatus("APPROVED");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
|
||||
assertThatThrownBy(() -> reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_deleteEntity_appliesChange() {
|
||||
pendingReview.setOperationType("DELETE_ENTITY");
|
||||
pendingReview.setEntityId(ENTITY_ID);
|
||||
pendingReview.setPayload(null);
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(entityService).deleteEntity(GRAPH_ID, ENTITY_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_updateEntity_appliesChange() {
|
||||
pendingReview.setOperationType("UPDATE_ENTITY");
|
||||
pendingReview.setEntityId(ENTITY_ID);
|
||||
pendingReview.setPayload("{\"name\":\"Updated\"}");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(entityService).updateEntity(eq(GRAPH_ID), eq(ENTITY_ID), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_createRelation_appliesChange() {
|
||||
pendingReview.setOperationType("CREATE_RELATION");
|
||||
pendingReview.setPayload("{\"sourceEntityId\":\"a\",\"targetEntityId\":\"b\",\"relationType\":\"HAS_FIELD\"}");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(relationService).createRelation(eq(GRAPH_ID), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_invalidPayload_throwsBusinessException() {
|
||||
pendingReview.setOperationType("CREATE_ENTITY");
|
||||
pendingReview.setPayload("not valid json {{");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
|
||||
assertThatThrownBy(() -> reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_batchDeleteEntity_appliesChange() {
|
||||
pendingReview.setOperationType("BATCH_DELETE_ENTITY");
|
||||
pendingReview.setPayload("{\"ids\":[\"id-1\",\"id-2\",\"id-3\"]}");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(entityService).batchDeleteEntities(eq(GRAPH_ID), eq(List.of("id-1", "id-2", "id-3")));
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_batchDeleteRelation_appliesChange() {
|
||||
pendingReview.setOperationType("BATCH_DELETE_RELATION");
|
||||
pendingReview.setPayload("{\"ids\":[\"rel-1\",\"rel-2\"]}");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(relationService).batchDeleteRelations(eq(GRAPH_ID), eq(List.of("rel-1", "rel-2")));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// rejectReview
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void rejectReview_success() {
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
EditReviewVO result = reviewService.rejectReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, "不合适");
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(pendingReview.getStatus()).isEqualTo("REJECTED");
|
||||
assertThat(pendingReview.getReviewedBy()).isEqualTo(REVIEWER_ID);
|
||||
assertThat(pendingReview.getReviewComment()).isEqualTo("不合适");
|
||||
assertThat(pendingReview.getReviewedAt()).isNotNull();
|
||||
|
||||
// Verify no change was applied
|
||||
verifyNoInteractions(entityService);
|
||||
verifyNoInteractions(relationService);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rejectReview_notFound_throwsBusinessException() {
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> reviewService.rejectReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rejectReview_alreadyProcessed_throwsBusinessException() {
|
||||
pendingReview.setStatus("REJECTED");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
|
||||
assertThatThrownBy(() -> reviewService.rejectReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listPendingReviews
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listPendingReviews_returnsPagedResult() {
|
||||
when(reviewRepository.findPendingByGraphId(GRAPH_ID, 0L, 20))
|
||||
.thenReturn(List.of(pendingReview));
|
||||
when(reviewRepository.countPendingByGraphId(GRAPH_ID)).thenReturn(1L);
|
||||
|
||||
var result = reviewService.listPendingReviews(GRAPH_ID, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
assertThat(result.getTotalElements()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listPendingReviews_clampsPageSize() {
|
||||
when(reviewRepository.findPendingByGraphId(GRAPH_ID, 0L, 200))
|
||||
.thenReturn(List.of());
|
||||
when(reviewRepository.countPendingByGraphId(GRAPH_ID)).thenReturn(0L);
|
||||
|
||||
reviewService.listPendingReviews(GRAPH_ID, 0, 999);
|
||||
|
||||
verify(reviewRepository).findPendingByGraphId(GRAPH_ID, 0L, 200);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listPendingReviews_negativePage_clampedToZero() {
|
||||
when(reviewRepository.findPendingByGraphId(GRAPH_ID, 0L, 20))
|
||||
.thenReturn(List.of());
|
||||
when(reviewRepository.countPendingByGraphId(GRAPH_ID)).thenReturn(0L);
|
||||
|
||||
var result = reviewService.listPendingReviews(GRAPH_ID, -1, 20);
|
||||
|
||||
assertThat(result.getPage()).isEqualTo(0);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listReviews
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listReviews_withStatusFilter() {
|
||||
when(reviewRepository.findByGraphId(GRAPH_ID, "APPROVED", 0L, 20))
|
||||
.thenReturn(List.of());
|
||||
when(reviewRepository.countByGraphId(GRAPH_ID, "APPROVED")).thenReturn(0L);
|
||||
|
||||
var result = reviewService.listReviews(GRAPH_ID, "APPROVED", 0, 20);
|
||||
|
||||
assertThat(result.getContent()).isEmpty();
|
||||
verify(reviewRepository).findByGraphId(GRAPH_ID, "APPROVED", 0L, 20);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listReviews_withoutStatusFilter() {
|
||||
when(reviewRepository.findByGraphId(GRAPH_ID, null, 0L, 20))
|
||||
.thenReturn(List.of(pendingReview));
|
||||
when(reviewRepository.countByGraphId(GRAPH_ID, null)).thenReturn(1L);
|
||||
|
||||
var result = reviewService.listReviews(GRAPH_ID, null, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,243 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.CreateEntityRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.UpdateEntityRequest;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class GraphEntityServiceTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String ENTITY_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
private static final String INVALID_GRAPH_ID = "not-a-uuid";
|
||||
|
||||
@Mock
|
||||
private GraphEntityRepository entityRepository;
|
||||
|
||||
@Mock
|
||||
private KnowledgeGraphProperties properties;
|
||||
|
||||
@Mock
|
||||
private GraphCacheService cacheService;
|
||||
|
||||
@InjectMocks
|
||||
private GraphEntityService entityService;
|
||||
|
||||
private GraphEntity sampleEntity;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
sampleEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID)
|
||||
.name("TestDataset")
|
||||
.type("Dataset")
|
||||
.description("A test dataset")
|
||||
.graphId(GRAPH_ID)
|
||||
.confidence(1.0)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.updatedAt(LocalDateTime.now())
|
||||
.build();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// graphId 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getEntity_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> entityService.getEntity(INVALID_GRAPH_ID, ENTITY_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getEntity_nullGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> entityService.getEntity(null, ENTITY_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// createEntity
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void createEntity_success() {
|
||||
CreateEntityRequest request = new CreateEntityRequest();
|
||||
request.setName("NewEntity");
|
||||
request.setType("Dataset");
|
||||
request.setDescription("Desc");
|
||||
|
||||
when(entityRepository.save(any(GraphEntity.class))).thenReturn(sampleEntity);
|
||||
|
||||
GraphEntity result = entityService.createEntity(GRAPH_ID, request);
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getName()).isEqualTo("TestDataset");
|
||||
verify(entityRepository).save(any(GraphEntity.class));
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, ENTITY_ID);
|
||||
verify(cacheService).evictSearchCaches(GRAPH_ID);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getEntity
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getEntity_found() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
|
||||
GraphEntity result = entityService.getEntity(GRAPH_ID, ENTITY_ID);
|
||||
|
||||
assertThat(result.getId()).isEqualTo(ENTITY_ID);
|
||||
assertThat(result.getName()).isEqualTo("TestDataset");
|
||||
}
|
||||
|
||||
@Test
|
||||
void getEntity_notFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> entityService.getEntity(GRAPH_ID, ENTITY_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listEntities
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listEntities_returnsAll() {
|
||||
when(entityRepository.findByGraphId(GRAPH_ID))
|
||||
.thenReturn(List.of(sampleEntity));
|
||||
|
||||
List<GraphEntity> results = entityService.listEntities(GRAPH_ID);
|
||||
|
||||
assertThat(results).hasSize(1);
|
||||
assertThat(results.get(0).getName()).isEqualTo("TestDataset");
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// updateEntity
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void updateEntity_partialUpdate_onlyChangesProvidedFields() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
when(entityRepository.save(any(GraphEntity.class)))
|
||||
.thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
UpdateEntityRequest request = new UpdateEntityRequest();
|
||||
request.setName("UpdatedName");
|
||||
// description not set — should remain unchanged
|
||||
|
||||
GraphEntity result = entityService.updateEntity(GRAPH_ID, ENTITY_ID, request);
|
||||
|
||||
assertThat(result.getName()).isEqualTo("UpdatedName");
|
||||
assertThat(result.getDescription()).isEqualTo("A test dataset");
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, ENTITY_ID);
|
||||
verify(cacheService).evictSearchCaches(GRAPH_ID);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// deleteEntity
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void deleteEntity_success() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
|
||||
entityService.deleteEntity(GRAPH_ID, ENTITY_ID);
|
||||
|
||||
verify(entityRepository).delete(sampleEntity);
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, ENTITY_ID);
|
||||
verify(cacheService).evictSearchCaches(GRAPH_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void deleteEntity_notFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> entityService.deleteEntity(GRAPH_ID, ENTITY_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getNeighbors — 深度/限制 clamping
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getNeighbors_clampsDepthAndLimit() {
|
||||
when(properties.getMaxDepth()).thenReturn(3);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
when(entityRepository.findNeighbors(eq(GRAPH_ID), eq(ENTITY_ID), eq(3), eq(500)))
|
||||
.thenReturn(List.of());
|
||||
|
||||
List<GraphEntity> result = entityService.getNeighbors(GRAPH_ID, ENTITY_ID, 100, 99999);
|
||||
|
||||
assertThat(result).isEmpty();
|
||||
// depth clamped to maxDepth=3, limit clamped to maxNodesPerQuery=500
|
||||
verify(entityRepository).findNeighbors(GRAPH_ID, ENTITY_ID, 3, 500);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 分页
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listEntitiesPaged_normalPage() {
|
||||
when(entityRepository.findByGraphIdPaged(GRAPH_ID, 0L, 20))
|
||||
.thenReturn(List.of(sampleEntity));
|
||||
when(entityRepository.countByGraphId(GRAPH_ID)).thenReturn(1L);
|
||||
|
||||
var result = entityService.listEntitiesPaged(GRAPH_ID, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
assertThat(result.getTotalElements()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listEntitiesPaged_negativePage_clampedToZero() {
|
||||
when(entityRepository.findByGraphIdPaged(GRAPH_ID, 0L, 20))
|
||||
.thenReturn(List.of());
|
||||
when(entityRepository.countByGraphId(GRAPH_ID)).thenReturn(0L);
|
||||
|
||||
var result = entityService.listEntitiesPaged(GRAPH_ID, -1, 20);
|
||||
|
||||
assertThat(result.getPage()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listEntitiesPaged_oversizedPage_clampedTo200() {
|
||||
when(entityRepository.findByGraphIdPaged(GRAPH_ID, 0L, 200))
|
||||
.thenReturn(List.of());
|
||||
when(entityRepository.countByGraphId(GRAPH_ID)).thenReturn(0L);
|
||||
|
||||
entityService.listEntitiesPaged(GRAPH_ID, 0, 999);
|
||||
|
||||
verify(entityRepository).findByGraphIdPaged(GRAPH_ID, 0L, 200);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,894 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.AllPathsVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SubgraphExportVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SubgraphVO;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.neo4j.driver.Driver;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class GraphQueryServiceTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String ENTITY_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
private static final String ENTITY_ID_2 = "660e8400-e29b-41d4-a716-446655440002";
|
||||
private static final String INVALID_GRAPH_ID = "bad-id";
|
||||
|
||||
@Mock
|
||||
private Neo4jClient neo4jClient;
|
||||
|
||||
@Mock
|
||||
private Driver neo4jDriver;
|
||||
|
||||
@Mock
|
||||
private GraphEntityRepository entityRepository;
|
||||
|
||||
@Mock
|
||||
private KnowledgeGraphProperties properties;
|
||||
|
||||
@Mock
|
||||
private ResourceAccessService resourceAccessService;
|
||||
|
||||
@InjectMocks
|
||||
private GraphQueryService queryService;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// graphId 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(INVALID_GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.getShortestPath(INVALID_GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.getSubgraph(INVALID_GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void fulltextSearch_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.fulltextSearch(INVALID_GRAPH_ID, "test", 0, 20))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getNeighborGraph — 实体不存在
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_entityNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getShortestPath — 起止相同
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getShortestPath_sameSourceAndTarget_returnsSingleNode() {
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Node").type("Dataset").graphId(GRAPH_ID).build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
var result = queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3);
|
||||
|
||||
assertThat(result.getPathLength()).isEqualTo(0);
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_sourceNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getSubgraph — 空输入
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getSubgraph_nullEntityIds_returnsEmptySubgraph() {
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, null);
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_emptyEntityIds_returnsEmptySubgraph() {
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of());
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_exceedsMaxNodes_throwsBusinessException() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(5);
|
||||
|
||||
List<String> tooManyIds = List.of("1", "2", "3", "4", "5", "6");
|
||||
|
||||
assertThatThrownBy(() -> queryService.getSubgraph(GRAPH_ID, tooManyIds))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_noExistingEntities_returnsEmptySubgraph() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of());
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// fulltextSearch — 空查询
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void fulltextSearch_blankQuery_returnsEmpty() {
|
||||
var result = queryService.fulltextSearch(GRAPH_ID, "", 0, 20);
|
||||
|
||||
assertThat(result.getContent()).isEmpty();
|
||||
assertThat(result.getTotalElements()).isEqualTo(0);
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void fulltextSearch_nullQuery_returnsEmpty() {
|
||||
var result = queryService.fulltextSearch(GRAPH_ID, null, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).isEmpty();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 权限过滤
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class PermissionFilteringTest {
|
||||
|
||||
private static final String CURRENT_USER_ID = "user-123";
|
||||
private static final String OTHER_USER_ID = "other-user";
|
||||
|
||||
// -- getNeighborGraph 权限 --
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_otherEntity_throwsInsufficientPermissions() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_admin_otherEntity_noPermissionDenied() {
|
||||
// 管理员返回 null → 不过滤
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(null);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
when(properties.getMaxDepth()).thenReturn(3);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
// 管理员不会被权限拦截,会继续到 Neo4jClient 调用
|
||||
// 由于 Neo4jClient 未完全 mock,会抛出其他异常,不是 BusinessException
|
||||
try {
|
||||
queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50);
|
||||
} catch (BusinessException e) {
|
||||
throw new AssertionError("Admin should not be blocked by permission check", e);
|
||||
} catch (Exception ignored) {
|
||||
// Neo4jClient mock chain 未完成,预期其他异常
|
||||
}
|
||||
}
|
||||
|
||||
// -- getShortestPath 权限 --
|
||||
|
||||
@Test
|
||||
void getShortestPath_nonAdmin_sourceNotAccessible_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_nonAdmin_targetNotAccessible_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
GraphEntity targetEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID_2, GRAPH_ID))
|
||||
.thenReturn(Optional.of(targetEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_nonAdmin_sameOwnEntity_returnsSingleNode() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
var result = queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3);
|
||||
|
||||
assertThat(result.getPathLength()).isEqualTo(0);
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("My Dataset");
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_nonAdmin_structuralEntity_noPermissionDenied() {
|
||||
// 结构型实体(无 created_by)对所有用户可见
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity structuralEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Admin User").type("User").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(structuralEntity));
|
||||
|
||||
// 起止相同 → 返回单节点路径,不需要 Neo4jClient
|
||||
var result = queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3);
|
||||
|
||||
assertThat(result.getPathLength()).isEqualTo(0);
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getType()).isEqualTo("User");
|
||||
}
|
||||
|
||||
// -- getSubgraph 权限过滤 --
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_filtersInaccessibleEntities() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity ownEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
GraphEntity otherEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Other Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2)))
|
||||
.thenReturn(List.of(ownEntity, otherEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2));
|
||||
|
||||
// 只返回自己创建的实体(另一个被过滤),单节点无边
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("My Dataset");
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_allFiltered_returnsEmptySubgraph() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity otherEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(otherEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_structuralEntitiesVisible() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
// 结构型实体没有 created_by → 对所有用户可见
|
||||
GraphEntity structuralEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Default Org").type("Org").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(structuralEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getType()).isEqualTo("Org");
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_admin_seesAllEntities() {
|
||||
// 管理员返回 null → 不过滤
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(null);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity otherUserEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "user-1")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(otherUserEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
// 管理员看到其他用户的实体(不被过滤)
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Other's Dataset");
|
||||
}
|
||||
|
||||
// -- P1-2: 业务实体缺失 created_by(脏数据)被正确拦截 --
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_businessEntityWithoutCreatedBy_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
// 业务实体缺失 created_by → 应被拦截
|
||||
GraphEntity dirtyEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Dirty Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(dirtyEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_businessEntityWithoutCreatedBy_filtered() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
// 业务实体缺失 created_by → 应被过滤
|
||||
GraphEntity dirtyEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Dirty Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(dirtyEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
// -- P1-1: CONFIDENTIAL 敏感度过滤 --
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_confidentialEntity_throwsWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
// canViewConfidential() 默认返回 false(mock 默认值)→ 无保密权限
|
||||
|
||||
GraphEntity confidentialEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "CONFIDENTIAL")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(confidentialEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_confidentialEntity_allowedWithPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(resourceAccessService.canViewConfidential()).thenReturn(true);
|
||||
|
||||
GraphEntity confidentialEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "CONFIDENTIAL")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(confidentialEntity));
|
||||
when(properties.getMaxDepth()).thenReturn(3);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
// 有保密权限 → 通过安全检查,继续到 Neo4jClient 调用
|
||||
try {
|
||||
queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50);
|
||||
} catch (BusinessException e) {
|
||||
throw new AssertionError("Should not be blocked by permission check", e);
|
||||
} catch (Exception ignored) {
|
||||
// Neo4jClient mock chain 未完成,预期其他异常
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_confidentialEntity_filteredWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
// canViewConfidential() 默认返回 false → 无保密权限
|
||||
|
||||
GraphEntity ownNonConfidential = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Normal KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
GraphEntity ownConfidential = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "CONFIDENTIAL")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2)))
|
||||
.thenReturn(List.of(ownNonConfidential, ownConfidential));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2));
|
||||
|
||||
// CONFIDENTIAL 实体被过滤,只剩普通实体
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Normal KS");
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_confidentialEntity_visibleWithPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(resourceAccessService.canViewConfidential()).thenReturn(true);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity ownConfidential = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "CONFIDENTIAL")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(ownConfidential));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
// 有保密权限 → 看到 CONFIDENTIAL 实体
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Secret KS");
|
||||
}
|
||||
|
||||
// -- P2-2: CONFIDENTIAL 大小写不敏感 --
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_lowercaseConfidential_throwsWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "confidential")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_mixedCaseConfidentialWithSpaces_throwsWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", " Confidential ")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_lowercaseConfidential_filteredWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity normalKs = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Normal KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
GraphEntity lowercaseConfidential = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "confidential")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2)))
|
||||
.thenReturn(List.of(normalKs, lowercaseConfidential));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2));
|
||||
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Normal KS");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// findAllPaths
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class FindAllPathsTest {
|
||||
|
||||
@Test
|
||||
void findAllPaths_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(INVALID_GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_sourceNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_targetNotFound_throwsBusinessException() {
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Source").type("Dataset").graphId(GRAPH_ID).build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID_2, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_sameSourceAndTarget_returnsSingleNodePath() {
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Node").type("Dataset").graphId(GRAPH_ID).build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
AllPathsVO result = queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3, 10);
|
||||
|
||||
assertThat(result.getPathCount()).isEqualTo(1);
|
||||
assertThat(result.getPaths()).hasSize(1);
|
||||
assertThat(result.getPaths().get(0).getPathLength()).isEqualTo(0);
|
||||
assertThat(result.getPaths().get(0).getNodes()).hasSize(1);
|
||||
assertThat(result.getPaths().get(0).getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_nonAdmin_sourceNotAccessible_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn("user-123");
|
||||
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "other-user")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_nonAdmin_targetNotAccessible_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn("user-123");
|
||||
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "user-123")))
|
||||
.build();
|
||||
GraphEntity targetEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "other-user")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID_2, GRAPH_ID))
|
||||
.thenReturn(Optional.of(targetEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_nonAdmin_structuralEntity_sameSourceAndTarget_returnsSingleNode() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn("user-123");
|
||||
|
||||
GraphEntity structuralEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Admin User").type("User").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(structuralEntity));
|
||||
|
||||
AllPathsVO result = queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3, 10);
|
||||
|
||||
assertThat(result.getPathCount()).isEqualTo(1);
|
||||
assertThat(result.getPaths().get(0).getNodes().get(0).getType()).isEqualTo("User");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// exportSubgraph
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class ExportSubgraphTest {
|
||||
|
||||
@Test
|
||||
void exportSubgraph_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.exportSubgraph(INVALID_GRAPH_ID, List.of(ENTITY_ID), 0))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_nullEntityIds_returnsEmptyExport() {
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID, null, 0);
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_emptyEntityIds_returnsEmptyExport() {
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID, List.of(), 0);
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_exceedsMaxNodes_throwsBusinessException() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(5);
|
||||
|
||||
List<String> tooManyIds = List.of("1", "2", "3", "4", "5", "6");
|
||||
|
||||
assertThatThrownBy(() -> queryService.exportSubgraph(GRAPH_ID, tooManyIds, 0))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_depthZero_noExistingEntities_returnsEmptyExport() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of());
|
||||
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID, List.of(ENTITY_ID), 0);
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_depthZero_singleEntity_returnsNodeWithProperties() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Test Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.description("A test dataset")
|
||||
.properties(new HashMap<>(Map.of("created_by", "user-1", "sensitivity", "PUBLIC")))
|
||||
.build();
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(entity));
|
||||
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID, List.of(ENTITY_ID), 0);
|
||||
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodeCount()).isEqualTo(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Test Dataset");
|
||||
assertThat(result.getNodes().get(0).getProperties()).containsEntry("created_by", "user-1");
|
||||
// 单节点无边
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_nonAdmin_filtersInaccessibleEntities() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn("user-123");
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity ownEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "user-123")))
|
||||
.build();
|
||||
GraphEntity otherEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Other Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "other-user")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2)))
|
||||
.thenReturn(List.of(ownEntity, otherEntity));
|
||||
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID,
|
||||
List.of(ENTITY_ID, ENTITY_ID_2), 0);
|
||||
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("My Dataset");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// convertToGraphML
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class ConvertToGraphMLTest {
|
||||
|
||||
@Test
|
||||
void convertToGraphML_emptyExport_producesValidXml() {
|
||||
SubgraphExportVO emptyExport = SubgraphExportVO.builder()
|
||||
.nodes(List.of())
|
||||
.edges(List.of())
|
||||
.nodeCount(0)
|
||||
.edgeCount(0)
|
||||
.build();
|
||||
|
||||
String graphml = queryService.convertToGraphML(emptyExport);
|
||||
|
||||
assertThat(graphml).contains("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
|
||||
assertThat(graphml).contains("<graphml");
|
||||
assertThat(graphml).contains("<graph id=\"G\" edgedefault=\"directed\">");
|
||||
assertThat(graphml).contains("</graphml>");
|
||||
}
|
||||
|
||||
@Test
|
||||
void convertToGraphML_withNodesAndEdges_producesCorrectStructure() {
|
||||
SubgraphExportVO export = SubgraphExportVO.builder()
|
||||
.nodes(List.of(
|
||||
com.datamate.knowledgegraph.interfaces.dto.ExportNodeVO.builder()
|
||||
.id("node-1").name("Dataset A").type("Dataset")
|
||||
.description("Test dataset").properties(Map.of())
|
||||
.build(),
|
||||
com.datamate.knowledgegraph.interfaces.dto.ExportNodeVO.builder()
|
||||
.id("node-2").name("Workflow B").type("Workflow")
|
||||
.description(null).properties(Map.of())
|
||||
.build()
|
||||
))
|
||||
.edges(List.of(
|
||||
com.datamate.knowledgegraph.interfaces.dto.ExportEdgeVO.builder()
|
||||
.id("edge-1").sourceEntityId("node-1").targetEntityId("node-2")
|
||||
.relationType("DERIVED_FROM").weight(0.8)
|
||||
.build()
|
||||
))
|
||||
.nodeCount(2)
|
||||
.edgeCount(1)
|
||||
.build();
|
||||
|
||||
String graphml = queryService.convertToGraphML(export);
|
||||
|
||||
assertThat(graphml).contains("<node id=\"node-1\">");
|
||||
assertThat(graphml).contains("<data key=\"name\">Dataset A</data>");
|
||||
assertThat(graphml).contains("<data key=\"type\">Dataset</data>");
|
||||
assertThat(graphml).contains("<data key=\"description\">Test dataset</data>");
|
||||
assertThat(graphml).contains("<node id=\"node-2\">");
|
||||
assertThat(graphml).contains("<data key=\"type\">Workflow</data>");
|
||||
// null description 不输出
|
||||
assertThat(graphml).doesNotContain("<data key=\"description\">null</data>");
|
||||
assertThat(graphml).contains("<edge id=\"edge-1\" source=\"node-1\" target=\"node-2\">");
|
||||
assertThat(graphml).contains("<data key=\"relationType\">DERIVED_FROM</data>");
|
||||
assertThat(graphml).contains("<data key=\"weight\">0.8</data>");
|
||||
}
|
||||
|
||||
@Test
|
||||
void convertToGraphML_specialCharactersEscaped() {
|
||||
SubgraphExportVO export = SubgraphExportVO.builder()
|
||||
.nodes(List.of(
|
||||
com.datamate.knowledgegraph.interfaces.dto.ExportNodeVO.builder()
|
||||
.id("node-1").name("A & B <Corp>").type("Org")
|
||||
.description("\"Test\" org").properties(Map.of())
|
||||
.build()
|
||||
))
|
||||
.edges(List.of())
|
||||
.nodeCount(1)
|
||||
.edgeCount(0)
|
||||
.build();
|
||||
|
||||
String graphml = queryService.convertToGraphML(export);
|
||||
|
||||
assertThat(graphml).contains("A & B <Corp>");
|
||||
assertThat(graphml).contains(""Test" org");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,278 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.model.RelationDetail;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphRelationRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.CreateRelationRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.RelationVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.UpdateRelationRequest;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class GraphRelationServiceTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String RELATION_ID = "770e8400-e29b-41d4-a716-446655440002";
|
||||
private static final String SOURCE_ENTITY_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
private static final String TARGET_ENTITY_ID = "660e8400-e29b-41d4-a716-446655440003";
|
||||
private static final String INVALID_GRAPH_ID = "not-a-uuid";
|
||||
|
||||
@Mock
|
||||
private GraphRelationRepository relationRepository;
|
||||
|
||||
@Mock
|
||||
private GraphEntityRepository entityRepository;
|
||||
|
||||
@Mock
|
||||
private GraphCacheService cacheService;
|
||||
|
||||
@InjectMocks
|
||||
private GraphRelationService relationService;
|
||||
|
||||
private RelationDetail sampleDetail;
|
||||
private GraphEntity sourceEntity;
|
||||
private GraphEntity targetEntity;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
sampleDetail = RelationDetail.builder()
|
||||
.id(RELATION_ID)
|
||||
.sourceEntityId(SOURCE_ENTITY_ID)
|
||||
.sourceEntityName("Source")
|
||||
.sourceEntityType("Dataset")
|
||||
.targetEntityId(TARGET_ENTITY_ID)
|
||||
.targetEntityName("Target")
|
||||
.targetEntityType("Field")
|
||||
.relationType("HAS_FIELD")
|
||||
.properties(Map.of())
|
||||
.weight(1.0)
|
||||
.confidence(1.0)
|
||||
.graphId(GRAPH_ID)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.build();
|
||||
|
||||
sourceEntity = GraphEntity.builder()
|
||||
.id(SOURCE_ENTITY_ID).name("Source").type("Dataset").graphId(GRAPH_ID).build();
|
||||
targetEntity = GraphEntity.builder()
|
||||
.id(TARGET_ENTITY_ID).name("Target").type("Field").graphId(GRAPH_ID).build();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// graphId 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getRelation_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> relationService.getRelation(INVALID_GRAPH_ID, RELATION_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// createRelation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void createRelation_success() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(TARGET_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(targetEntity));
|
||||
when(relationRepository.create(eq(GRAPH_ID), eq(SOURCE_ENTITY_ID), eq(TARGET_ENTITY_ID),
|
||||
eq("HAS_FIELD"), anyMap(), isNull(), isNull(), isNull()))
|
||||
.thenReturn(Optional.of(sampleDetail));
|
||||
|
||||
CreateRelationRequest request = new CreateRelationRequest();
|
||||
request.setSourceEntityId(SOURCE_ENTITY_ID);
|
||||
request.setTargetEntityId(TARGET_ENTITY_ID);
|
||||
request.setRelationType("HAS_FIELD");
|
||||
|
||||
RelationVO result = relationService.createRelation(GRAPH_ID, request);
|
||||
|
||||
assertThat(result.getId()).isEqualTo(RELATION_ID);
|
||||
assertThat(result.getRelationType()).isEqualTo("HAS_FIELD");
|
||||
assertThat(result.getSourceEntityId()).isEqualTo(SOURCE_ENTITY_ID);
|
||||
assertThat(result.getTargetEntityId()).isEqualTo(TARGET_ENTITY_ID);
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, SOURCE_ENTITY_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void createRelation_sourceNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
CreateRelationRequest request = new CreateRelationRequest();
|
||||
request.setSourceEntityId(SOURCE_ENTITY_ID);
|
||||
request.setTargetEntityId(TARGET_ENTITY_ID);
|
||||
request.setRelationType("HAS_FIELD");
|
||||
|
||||
assertThatThrownBy(() -> relationService.createRelation(GRAPH_ID, request))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void createRelation_targetNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(TARGET_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
CreateRelationRequest request = new CreateRelationRequest();
|
||||
request.setSourceEntityId(SOURCE_ENTITY_ID);
|
||||
request.setTargetEntityId(TARGET_ENTITY_ID);
|
||||
request.setRelationType("HAS_FIELD");
|
||||
|
||||
assertThatThrownBy(() -> relationService.createRelation(GRAPH_ID, request))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getRelation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getRelation_found() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleDetail));
|
||||
|
||||
RelationVO result = relationService.getRelation(GRAPH_ID, RELATION_ID);
|
||||
|
||||
assertThat(result.getId()).isEqualTo(RELATION_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getRelation_notFound_throwsBusinessException() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> relationService.getRelation(GRAPH_ID, RELATION_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listRelations (分页)
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listRelations_returnsPaged() {
|
||||
when(relationRepository.findByGraphId(GRAPH_ID, null, 0L, 20))
|
||||
.thenReturn(List.of(sampleDetail));
|
||||
when(relationRepository.countByGraphId(GRAPH_ID, null))
|
||||
.thenReturn(1L);
|
||||
|
||||
var result = relationService.listRelations(GRAPH_ID, null, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
assertThat(result.getTotalElements()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listRelations_oversizedPage_clampedTo200() {
|
||||
when(relationRepository.findByGraphId(GRAPH_ID, null, 0L, 200))
|
||||
.thenReturn(List.of());
|
||||
when(relationRepository.countByGraphId(GRAPH_ID, null))
|
||||
.thenReturn(0L);
|
||||
|
||||
relationService.listRelations(GRAPH_ID, null, 0, 999);
|
||||
|
||||
verify(relationRepository).findByGraphId(GRAPH_ID, null, 0L, 200);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listEntityRelations — direction 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listEntityRelations_invalidDirection_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
|
||||
assertThatThrownBy(() ->
|
||||
relationService.listEntityRelations(GRAPH_ID, SOURCE_ENTITY_ID, "invalid", null, 0, 20))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listEntityRelations_inDirection() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(relationRepository.findInboundByEntityId(GRAPH_ID, SOURCE_ENTITY_ID, null, 0L, 20))
|
||||
.thenReturn(List.of(sampleDetail));
|
||||
when(relationRepository.countByEntityId(GRAPH_ID, SOURCE_ENTITY_ID, null, "in"))
|
||||
.thenReturn(1L);
|
||||
|
||||
var result = relationService.listEntityRelations(
|
||||
GRAPH_ID, SOURCE_ENTITY_ID, "in", null, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// updateRelation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void updateRelation_success() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleDetail));
|
||||
RelationDetail updated = RelationDetail.builder()
|
||||
.id(RELATION_ID).relationType("USES").weight(0.8)
|
||||
.sourceEntityId(SOURCE_ENTITY_ID).targetEntityId(TARGET_ENTITY_ID)
|
||||
.graphId(GRAPH_ID).build();
|
||||
when(relationRepository.update(eq(RELATION_ID), eq(GRAPH_ID), eq("USES"), isNull(), eq(0.8), isNull()))
|
||||
.thenReturn(Optional.of(updated));
|
||||
|
||||
UpdateRelationRequest request = new UpdateRelationRequest();
|
||||
request.setRelationType("USES");
|
||||
request.setWeight(0.8);
|
||||
|
||||
RelationVO result = relationService.updateRelation(GRAPH_ID, RELATION_ID, request);
|
||||
|
||||
assertThat(result.getRelationType()).isEqualTo("USES");
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, SOURCE_ENTITY_ID);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// deleteRelation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void deleteRelation_success() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleDetail));
|
||||
when(relationRepository.deleteByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(1L);
|
||||
|
||||
relationService.deleteRelation(GRAPH_ID, RELATION_ID);
|
||||
|
||||
verify(relationRepository).deleteByIdAndGraphId(RELATION_ID, GRAPH_ID);
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, SOURCE_ENTITY_ID);
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, TARGET_ENTITY_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void deleteRelation_notFound_throwsBusinessException() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> relationService.deleteRelation(GRAPH_ID, RELATION_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,44 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.ArgumentMatchers.anyString;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class IndexHealthServiceTest {
|
||||
|
||||
@Mock
|
||||
private Neo4jClient neo4jClient;
|
||||
|
||||
private IndexHealthService indexHealthService;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
indexHealthService = new IndexHealthService(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void allIndexesOnline_empty_returns_false() {
|
||||
// Neo4jClient mocking is complex; verify the logic conceptually
|
||||
// When no indexes found, should return false
|
||||
// This tests the service was correctly constructed
|
||||
assertThat(indexHealthService).isNotNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
void service_is_injectable() {
|
||||
// Verify the service can be instantiated with a Neo4jClient
|
||||
IndexHealthService service = new IndexHealthService(neo4jClient);
|
||||
assertThat(service).isNotNull();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,96 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class SyncMetadataTest {
|
||||
|
||||
@Test
|
||||
void fromResults_aggregatesCountsCorrectly() {
|
||||
LocalDateTime startedAt = LocalDateTime.of(2025, 6, 1, 10, 0, 0);
|
||||
|
||||
SyncResult r1 = SyncResult.builder().syncType("Dataset").created(5).updated(2).failed(1).purged(3).build();
|
||||
SyncResult r2 = SyncResult.builder().syncType("Field").created(10).updated(0).skipped(2).build();
|
||||
SyncResult r3 = SyncResult.builder().syncType("HAS_FIELD").created(8).build();
|
||||
|
||||
SyncMetadata metadata = SyncMetadata.fromResults("abc123", "graph-id", "FULL", startedAt, List.of(r1, r2, r3));
|
||||
|
||||
assertThat(metadata.getSyncId()).isEqualTo("abc123");
|
||||
assertThat(metadata.getGraphId()).isEqualTo("graph-id");
|
||||
assertThat(metadata.getSyncType()).isEqualTo("FULL");
|
||||
assertThat(metadata.getTotalCreated()).isEqualTo(23); // 5 + 10 + 8
|
||||
assertThat(metadata.getTotalUpdated()).isEqualTo(2); // 2 + 0 + 0
|
||||
assertThat(metadata.getTotalSkipped()).isEqualTo(2); // 0 + 2 + 0
|
||||
assertThat(metadata.getTotalFailed()).isEqualTo(1); // 1 + 0 + 0
|
||||
assertThat(metadata.getTotalPurged()).isEqualTo(3); // 3 + 0 + 0
|
||||
assertThat(metadata.getStartedAt()).isEqualTo(startedAt);
|
||||
assertThat(metadata.getCompletedAt()).isNotNull();
|
||||
assertThat(metadata.getDurationMillis()).isGreaterThanOrEqualTo(0);
|
||||
assertThat(metadata.getResults()).hasSize(3);
|
||||
assertThat(metadata.getStepSummaries()).hasSize(3);
|
||||
}
|
||||
|
||||
@Test
|
||||
void fromResults_noFailures_statusIsSuccess() {
|
||||
LocalDateTime startedAt = LocalDateTime.now();
|
||||
SyncResult r1 = SyncResult.builder().syncType("Dataset").created(5).build();
|
||||
|
||||
SyncMetadata metadata = SyncMetadata.fromResults("abc", "g1", "FULL", startedAt, List.of(r1));
|
||||
|
||||
assertThat(metadata.getStatus()).isEqualTo(SyncMetadata.STATUS_SUCCESS);
|
||||
}
|
||||
|
||||
@Test
|
||||
void fromResults_withFailures_statusIsPartial() {
|
||||
LocalDateTime startedAt = LocalDateTime.now();
|
||||
SyncResult r1 = SyncResult.builder().syncType("Dataset").created(5).failed(2).build();
|
||||
|
||||
SyncMetadata metadata = SyncMetadata.fromResults("abc", "g1", "FULL", startedAt, List.of(r1));
|
||||
|
||||
assertThat(metadata.getStatus()).isEqualTo(SyncMetadata.STATUS_PARTIAL);
|
||||
assertThat(metadata.getTotalFailed()).isEqualTo(2);
|
||||
}
|
||||
|
||||
@Test
|
||||
void failed_createsFailedMetadata() {
|
||||
LocalDateTime startedAt = LocalDateTime.of(2025, 1, 1, 0, 0, 0);
|
||||
|
||||
SyncMetadata metadata = SyncMetadata.failed("abc", "g1", "FULL", startedAt, "connection refused");
|
||||
|
||||
assertThat(metadata.getStatus()).isEqualTo(SyncMetadata.STATUS_FAILED);
|
||||
assertThat(metadata.getErrorMessage()).isEqualTo("connection refused");
|
||||
assertThat(metadata.getSyncId()).isEqualTo("abc");
|
||||
assertThat(metadata.getGraphId()).isEqualTo("g1");
|
||||
assertThat(metadata.getSyncType()).isEqualTo("FULL");
|
||||
assertThat(metadata.getStartedAt()).isEqualTo(startedAt);
|
||||
assertThat(metadata.getCompletedAt()).isNotNull();
|
||||
assertThat(metadata.getDurationMillis()).isGreaterThanOrEqualTo(0);
|
||||
assertThat(metadata.getTotalCreated()).isEqualTo(0);
|
||||
assertThat(metadata.getTotalUpdated()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void totalEntities_returnsSum() {
|
||||
SyncMetadata metadata = SyncMetadata.builder()
|
||||
.totalCreated(10).totalUpdated(5).totalSkipped(3).totalFailed(2)
|
||||
.build();
|
||||
|
||||
assertThat(metadata.totalEntities()).isEqualTo(20);
|
||||
}
|
||||
|
||||
@Test
|
||||
void stepSummaries_formatIncludesPurgedWhenNonZero() {
|
||||
LocalDateTime startedAt = LocalDateTime.now();
|
||||
SyncResult r1 = SyncResult.builder().syncType("Dataset").created(5).updated(2).failed(0).purged(3).build();
|
||||
SyncResult r2 = SyncResult.builder().syncType("Field").created(1).updated(0).failed(0).purged(0).build();
|
||||
|
||||
SyncMetadata metadata = SyncMetadata.fromResults("abc", "g1", "FULL", startedAt, List.of(r1, r2));
|
||||
|
||||
assertThat(metadata.getStepSummaries().get(0)).isEqualTo("Dataset(+5/~2/-0/purged:3)");
|
||||
assertThat(metadata.getStepSummaries().get(1)).isEqualTo("Field(+1/~0/-0)");
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,280 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.cache;
|
||||
|
||||
import com.datamate.knowledgegraph.application.GraphEntityService;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.cache.CacheManager;
|
||||
import org.springframework.cache.annotation.EnableCaching;
|
||||
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
/**
|
||||
* 集成测试:验证 @Cacheable 代理在 Spring 上下文中正确工作。
|
||||
* <p>
|
||||
* 使用 {@link ConcurrentMapCacheManager} 替代 Redis,验证:
|
||||
* <ul>
|
||||
* <li>缓存命中时不重复查询数据库</li>
|
||||
* <li>缓存失效后重新查询数据库</li>
|
||||
* <li>不同图谱的缓存独立</li>
|
||||
* <li>不同用户上下文产生不同缓存 key(权限隔离)</li>
|
||||
* </ul>
|
||||
*/
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ContextConfiguration(classes = CacheableIntegrationTest.Config.class)
|
||||
class CacheableIntegrationTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String GRAPH_ID_2 = "660e8400-e29b-41d4-a716-446655440099";
|
||||
private static final String ENTITY_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
|
||||
@Configuration
|
||||
@EnableCaching
|
||||
static class Config {
|
||||
|
||||
@Bean("knowledgeGraphCacheManager")
|
||||
CacheManager knowledgeGraphCacheManager() {
|
||||
return new ConcurrentMapCacheManager(
|
||||
RedisCacheConfig.CACHE_ENTITIES,
|
||||
RedisCacheConfig.CACHE_QUERIES,
|
||||
RedisCacheConfig.CACHE_SEARCH
|
||||
);
|
||||
}
|
||||
|
||||
@Bean
|
||||
GraphEntityRepository entityRepository() {
|
||||
return mock(GraphEntityRepository.class);
|
||||
}
|
||||
|
||||
@Bean
|
||||
KnowledgeGraphProperties properties() {
|
||||
return mock(KnowledgeGraphProperties.class);
|
||||
}
|
||||
|
||||
@Bean
|
||||
GraphCacheService graphCacheService(CacheManager cacheManager) {
|
||||
return new GraphCacheService(cacheManager);
|
||||
}
|
||||
|
||||
@Bean
|
||||
GraphEntityService graphEntityService(
|
||||
GraphEntityRepository entityRepository,
|
||||
KnowledgeGraphProperties properties,
|
||||
GraphCacheService graphCacheService) {
|
||||
return new GraphEntityService(entityRepository, properties, graphCacheService);
|
||||
}
|
||||
}
|
||||
|
||||
@Autowired
|
||||
private GraphEntityService entityService;
|
||||
|
||||
@Autowired
|
||||
private GraphEntityRepository entityRepository;
|
||||
|
||||
@Autowired
|
||||
private CacheManager cacheManager;
|
||||
|
||||
@Autowired
|
||||
private GraphCacheService graphCacheService;
|
||||
|
||||
private GraphEntity sampleEntity;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
sampleEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID)
|
||||
.name("TestDataset")
|
||||
.type("Dataset")
|
||||
.description("A test dataset")
|
||||
.graphId(GRAPH_ID)
|
||||
.confidence(1.0)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.updatedAt(LocalDateTime.now())
|
||||
.build();
|
||||
|
||||
cacheManager.getCacheNames().forEach(name -> {
|
||||
var cache = cacheManager.getCache(name);
|
||||
if (cache != null) cache.clear();
|
||||
});
|
||||
reset(entityRepository);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// @Cacheable 代理行为
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class CacheProxyTest {
|
||||
|
||||
@Test
|
||||
void getEntity_secondCall_returnsCachedResultWithoutHittingRepository() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
|
||||
GraphEntity first = entityService.getEntity(GRAPH_ID, ENTITY_ID);
|
||||
assertThat(first.getId()).isEqualTo(ENTITY_ID);
|
||||
|
||||
GraphEntity second = entityService.getEntity(GRAPH_ID, ENTITY_ID);
|
||||
assertThat(second.getId()).isEqualTo(ENTITY_ID);
|
||||
|
||||
verify(entityRepository, times(1)).findByIdAndGraphId(ENTITY_ID, GRAPH_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listEntities_secondCall_returnsCachedResult() {
|
||||
when(entityRepository.findByGraphId(GRAPH_ID))
|
||||
.thenReturn(List.of(sampleEntity));
|
||||
|
||||
entityService.listEntities(GRAPH_ID);
|
||||
entityService.listEntities(GRAPH_ID);
|
||||
|
||||
verify(entityRepository, times(1)).findByGraphId(GRAPH_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void differentGraphIds_produceSeparateCacheEntries() {
|
||||
GraphEntity entity2 = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("OtherDataset").type("Dataset")
|
||||
.graphId(GRAPH_ID_2).confidence(1.0)
|
||||
.createdAt(LocalDateTime.now()).updatedAt(LocalDateTime.now())
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID_2))
|
||||
.thenReturn(Optional.of(entity2));
|
||||
|
||||
GraphEntity result1 = entityService.getEntity(GRAPH_ID, ENTITY_ID);
|
||||
GraphEntity result2 = entityService.getEntity(GRAPH_ID_2, ENTITY_ID);
|
||||
|
||||
assertThat(result1.getName()).isEqualTo("TestDataset");
|
||||
assertThat(result2.getName()).isEqualTo("OtherDataset");
|
||||
verify(entityRepository).findByIdAndGraphId(ENTITY_ID, GRAPH_ID);
|
||||
verify(entityRepository).findByIdAndGraphId(ENTITY_ID, GRAPH_ID_2);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 缓存失效行为
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class CacheEvictionTest {
|
||||
|
||||
@Test
|
||||
void evictEntityCaches_causesNextCallToHitRepository() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
|
||||
entityService.getEntity(GRAPH_ID, ENTITY_ID);
|
||||
verify(entityRepository, times(1)).findByIdAndGraphId(ENTITY_ID, GRAPH_ID);
|
||||
|
||||
graphCacheService.evictEntityCaches(GRAPH_ID, ENTITY_ID);
|
||||
|
||||
entityService.getEntity(GRAPH_ID, ENTITY_ID);
|
||||
verify(entityRepository, times(2)).findByIdAndGraphId(ENTITY_ID, GRAPH_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictEntityCaches_alsoEvictsListCache() {
|
||||
when(entityRepository.findByGraphId(GRAPH_ID))
|
||||
.thenReturn(List.of(sampleEntity));
|
||||
|
||||
entityService.listEntities(GRAPH_ID);
|
||||
verify(entityRepository, times(1)).findByGraphId(GRAPH_ID);
|
||||
|
||||
graphCacheService.evictEntityCaches(GRAPH_ID, ENTITY_ID);
|
||||
|
||||
entityService.listEntities(GRAPH_ID);
|
||||
verify(entityRepository, times(2)).findByGraphId(GRAPH_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictGraphCaches_clearsAllCacheRegions() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
when(entityRepository.findByGraphId(GRAPH_ID))
|
||||
.thenReturn(List.of(sampleEntity));
|
||||
|
||||
entityService.getEntity(GRAPH_ID, ENTITY_ID);
|
||||
entityService.listEntities(GRAPH_ID);
|
||||
|
||||
graphCacheService.evictGraphCaches(GRAPH_ID);
|
||||
|
||||
entityService.getEntity(GRAPH_ID, ENTITY_ID);
|
||||
entityService.listEntities(GRAPH_ID);
|
||||
verify(entityRepository, times(2)).findByIdAndGraphId(ENTITY_ID, GRAPH_ID);
|
||||
verify(entityRepository, times(2)).findByGraphId(GRAPH_ID);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 权限隔离(缓存 key 级别验证)
|
||||
//
|
||||
// GraphQueryService 的 @Cacheable 使用 SpEL 表达式:
|
||||
// @resourceAccessService.resolveOwnerFilterUserId()
|
||||
// @resourceAccessService.canViewConfidential()
|
||||
// 这些值最终传入 GraphCacheService.cacheKey() 生成 key。
|
||||
// 以下测试验证不同用户上下文产生不同的缓存 key,
|
||||
// 结合上方的代理测试,确保不同用户获得独立的缓存条目。
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class PermissionIsolationTest {
|
||||
|
||||
@Test
|
||||
void adminAndRegularUser_produceDifferentCacheKeys() {
|
||||
String adminKey = GraphCacheService.cacheKey(
|
||||
GRAPH_ID, "query", 0, 20, null, true);
|
||||
String userKey = GraphCacheService.cacheKey(
|
||||
GRAPH_ID, "query", 0, 20, "user-a", false);
|
||||
|
||||
assertThat(adminKey).isNotEqualTo(userKey);
|
||||
}
|
||||
|
||||
@Test
|
||||
void differentUsers_produceDifferentCacheKeys() {
|
||||
String userAKey = GraphCacheService.cacheKey(
|
||||
GRAPH_ID, "query", 0, 20, "user-a", false);
|
||||
String userBKey = GraphCacheService.cacheKey(
|
||||
GRAPH_ID, "query", 0, 20, "user-b", false);
|
||||
|
||||
assertThat(userAKey).isNotEqualTo(userBKey);
|
||||
}
|
||||
|
||||
@Test
|
||||
void sameUserDifferentConfidentialAccess_produceDifferentCacheKeys() {
|
||||
String withConfidential = GraphCacheService.cacheKey(
|
||||
GRAPH_ID, "query", 0, 20, "user-a", true);
|
||||
String withoutConfidential = GraphCacheService.cacheKey(
|
||||
GRAPH_ID, "query", 0, 20, "user-a", false);
|
||||
|
||||
assertThat(withConfidential).isNotEqualTo(withoutConfidential);
|
||||
}
|
||||
|
||||
@Test
|
||||
void sameParametersAndUser_produceIdenticalCacheKeys() {
|
||||
String key1 = GraphCacheService.cacheKey(
|
||||
GRAPH_ID, "query", 0, 20, "user-a", false);
|
||||
String key2 = GraphCacheService.cacheKey(
|
||||
GRAPH_ID, "query", 0, 20, "user-a", false);
|
||||
|
||||
assertThat(key1).isEqualTo(key2);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,273 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.cache;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.cache.Cache;
|
||||
import org.springframework.cache.CacheManager;
|
||||
import org.springframework.data.redis.core.StringRedisTemplate;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class GraphCacheServiceTest {
|
||||
|
||||
@Mock
|
||||
private CacheManager cacheManager;
|
||||
|
||||
@Mock
|
||||
private StringRedisTemplate redisTemplate;
|
||||
|
||||
@Mock
|
||||
private Cache entityCache;
|
||||
|
||||
@Mock
|
||||
private Cache queryCache;
|
||||
|
||||
@Mock
|
||||
private Cache searchCache;
|
||||
|
||||
private GraphCacheService cacheService;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
cacheService = new GraphCacheService(cacheManager);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 退化模式(无 RedisTemplate):清空整个缓存区域
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class FallbackModeTest {
|
||||
|
||||
@Test
|
||||
void evictGraphCaches_withoutRedis_clearsAllCaches() {
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_ENTITIES)).thenReturn(entityCache);
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_QUERIES)).thenReturn(queryCache);
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_SEARCH)).thenReturn(searchCache);
|
||||
|
||||
cacheService.evictGraphCaches("graph-id");
|
||||
|
||||
verify(entityCache).clear();
|
||||
verify(queryCache).clear();
|
||||
verify(searchCache).clear();
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictEntityCaches_withoutRedis_evictsSpecificKeysAndClearsQueries() {
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_ENTITIES)).thenReturn(entityCache);
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_QUERIES)).thenReturn(queryCache);
|
||||
|
||||
cacheService.evictEntityCaches("graph-1", "entity-1");
|
||||
|
||||
// 精确失效两个 key
|
||||
verify(entityCache).evict("graph-1:entity-1");
|
||||
verify(entityCache).evict("graph-1:list");
|
||||
// 查询缓存退化为清空(因无 Redis 做前缀匹配)
|
||||
verify(queryCache).clear();
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictSearchCaches_withGraphId_withoutRedis_clearsAll() {
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_SEARCH)).thenReturn(searchCache);
|
||||
|
||||
cacheService.evictSearchCaches("graph-1");
|
||||
|
||||
verify(searchCache).clear();
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictSearchCaches_noArgs_clearsAll() {
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_SEARCH)).thenReturn(searchCache);
|
||||
|
||||
cacheService.evictSearchCaches();
|
||||
|
||||
verify(searchCache).clear();
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictGraphCaches_toleratesNullCache() {
|
||||
when(cacheManager.getCache(anyString())).thenReturn(null);
|
||||
|
||||
// 不应抛出异常
|
||||
cacheService.evictGraphCaches("graph-1");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 细粒度模式(有 RedisTemplate):按 graphId 前缀失效
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class FineGrainedModeTest {
|
||||
|
||||
@BeforeEach
|
||||
void setUpRedis() {
|
||||
cacheService.setRedisTemplate(redisTemplate);
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictGraphCaches_withRedis_deletesKeysByGraphPrefix() {
|
||||
Set<String> entityKeys = new HashSet<>(Set.of("datamate:kg:entities::graph-1:ent-1", "datamate:kg:entities::graph-1:list"));
|
||||
Set<String> queryKeys = new HashSet<>(Set.of("datamate:kg:queries::graph-1:ent-1:2:100:null:true"));
|
||||
Set<String> searchKeys = new HashSet<>(Set.of("datamate:kg:search::graph-1:keyword:0:20:null:true"));
|
||||
|
||||
when(redisTemplate.keys("datamate:kg:entities::graph-1:*")).thenReturn(entityKeys);
|
||||
when(redisTemplate.keys("datamate:kg:queries::graph-1:*")).thenReturn(queryKeys);
|
||||
when(redisTemplate.keys("datamate:kg:search::graph-1:*")).thenReturn(searchKeys);
|
||||
|
||||
cacheService.evictGraphCaches("graph-1");
|
||||
|
||||
verify(redisTemplate).delete(entityKeys);
|
||||
verify(redisTemplate).delete(queryKeys);
|
||||
verify(redisTemplate).delete(searchKeys);
|
||||
// CacheManager.clear() should NOT be called
|
||||
verify(cacheManager, never()).getCache(anyString());
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictGraphCaches_withRedis_emptyKeysDoesNotCallDelete() {
|
||||
when(redisTemplate.keys(anyString())).thenReturn(Set.of());
|
||||
|
||||
cacheService.evictGraphCaches("graph-1");
|
||||
|
||||
verify(redisTemplate, never()).delete(anyCollection());
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictGraphCaches_withRedis_nullKeysDoesNotCallDelete() {
|
||||
when(redisTemplate.keys(anyString())).thenReturn(null);
|
||||
|
||||
cacheService.evictGraphCaches("graph-1");
|
||||
|
||||
verify(redisTemplate, never()).delete(anyCollection());
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictGraphCaches_redisException_fallsBackToClear() {
|
||||
when(redisTemplate.keys(anyString())).thenThrow(new RuntimeException("Redis down"));
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_ENTITIES)).thenReturn(entityCache);
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_QUERIES)).thenReturn(queryCache);
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_SEARCH)).thenReturn(searchCache);
|
||||
|
||||
cacheService.evictGraphCaches("graph-1");
|
||||
|
||||
// 应退化为清空整个缓存
|
||||
verify(entityCache).clear();
|
||||
verify(queryCache).clear();
|
||||
verify(searchCache).clear();
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictEntityCaches_withRedis_evictsSpecificKeysAndQueriesByPrefix() {
|
||||
when(cacheManager.getCache(RedisCacheConfig.CACHE_ENTITIES)).thenReturn(entityCache);
|
||||
Set<String> queryKeys = new HashSet<>(Set.of("datamate:kg:queries::graph-1:ent-1:2:100:null:true"));
|
||||
when(redisTemplate.keys("datamate:kg:queries::graph-1:*")).thenReturn(queryKeys);
|
||||
|
||||
cacheService.evictEntityCaches("graph-1", "entity-1");
|
||||
|
||||
// 精确失效实体缓存
|
||||
verify(entityCache).evict("graph-1:entity-1");
|
||||
verify(entityCache).evict("graph-1:list");
|
||||
// 查询缓存按前缀失效
|
||||
verify(redisTemplate).delete(queryKeys);
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictSearchCaches_withRedis_deletesKeysByGraphPrefix() {
|
||||
Set<String> searchKeys = new HashSet<>(Set.of("datamate:kg:search::graph-1:query:0:20:user1:false"));
|
||||
when(redisTemplate.keys("datamate:kg:search::graph-1:*")).thenReturn(searchKeys);
|
||||
|
||||
cacheService.evictSearchCaches("graph-1");
|
||||
|
||||
verify(redisTemplate).delete(searchKeys);
|
||||
}
|
||||
|
||||
@Test
|
||||
void evictGraphCaches_isolatesGraphIds() {
|
||||
// graph-1 的 key
|
||||
Set<String> graph1Keys = new HashSet<>(Set.of("datamate:kg:entities::graph-1:ent-1"));
|
||||
when(redisTemplate.keys("datamate:kg:entities::graph-1:*")).thenReturn(graph1Keys);
|
||||
when(redisTemplate.keys("datamate:kg:queries::graph-1:*")).thenReturn(Set.of());
|
||||
when(redisTemplate.keys("datamate:kg:search::graph-1:*")).thenReturn(Set.of());
|
||||
|
||||
cacheService.evictGraphCaches("graph-1");
|
||||
|
||||
// 仅删除 graph-1 的 key
|
||||
verify(redisTemplate).delete(graph1Keys);
|
||||
// 不应查询 graph-2 的 key
|
||||
verify(redisTemplate, never()).keys(contains("graph-2"));
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// cacheKey 静态方法
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class CacheKeyTest {
|
||||
|
||||
@Test
|
||||
void cacheKey_joinsPartsWithColon() {
|
||||
String key = GraphCacheService.cacheKey("a", "b", "c");
|
||||
assertThat(key).isEqualTo("a:b:c");
|
||||
}
|
||||
|
||||
@Test
|
||||
void cacheKey_handlesNullParts() {
|
||||
String key = GraphCacheService.cacheKey("a", null, "c");
|
||||
assertThat(key).isEqualTo("a:null:c");
|
||||
}
|
||||
|
||||
@Test
|
||||
void cacheKey_handlesSinglePart() {
|
||||
String key = GraphCacheService.cacheKey("only");
|
||||
assertThat(key).isEqualTo("only");
|
||||
}
|
||||
|
||||
@Test
|
||||
void cacheKey_handlesNumericParts() {
|
||||
String key = GraphCacheService.cacheKey("graph", 42, 0, 20);
|
||||
assertThat(key).isEqualTo("graph:42:0:20");
|
||||
}
|
||||
|
||||
@Test
|
||||
void cacheKey_withUserContext_differentUsersProduceDifferentKeys() {
|
||||
String adminKey = GraphCacheService.cacheKey("graph-1", "query", 0, 20, null, true);
|
||||
String userAKey = GraphCacheService.cacheKey("graph-1", "query", 0, 20, "user-a", false);
|
||||
String userBKey = GraphCacheService.cacheKey("graph-1", "query", 0, 20, "user-b", false);
|
||||
String userAConfKey = GraphCacheService.cacheKey("graph-1", "query", 0, 20, "user-a", true);
|
||||
|
||||
assertThat(adminKey).isNotEqualTo(userAKey);
|
||||
assertThat(userAKey).isNotEqualTo(userBKey);
|
||||
assertThat(userAKey).isNotEqualTo(userAConfKey);
|
||||
|
||||
// 相同参数应产生相同 key
|
||||
String adminKey2 = GraphCacheService.cacheKey("graph-1", "query", 0, 20, null, true);
|
||||
assertThat(adminKey).isEqualTo(adminKey2);
|
||||
}
|
||||
|
||||
@Test
|
||||
void cacheKey_graphIdIsFirstSegment() {
|
||||
String key = GraphCacheService.cacheKey("graph-123", "entity-456");
|
||||
assertThat(key).startsWith("graph-123:");
|
||||
}
|
||||
|
||||
@Test
|
||||
void cacheKey_booleanParts() {
|
||||
String keyTrue = GraphCacheService.cacheKey("g", "q", true);
|
||||
String keyFalse = GraphCacheService.cacheKey("g", "q", false);
|
||||
assertThat(keyTrue).isEqualTo("g:q:true");
|
||||
assertThat(keyFalse).isEqualTo("g:q:false");
|
||||
assertThat(keyTrue).isNotEqualTo(keyFalse);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,649 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.client;
|
||||
|
||||
import com.datamate.knowledgegraph.infrastructure.client.DataManagementClient.DatasetDTO;
|
||||
import com.datamate.knowledgegraph.infrastructure.client.DataManagementClient.JobDTO;
|
||||
import com.datamate.knowledgegraph.infrastructure.client.DataManagementClient.KnowledgeSetDTO;
|
||||
import com.datamate.knowledgegraph.infrastructure.client.DataManagementClient.LabelTaskDTO;
|
||||
import com.datamate.knowledgegraph.infrastructure.client.DataManagementClient.PagedResult;
|
||||
import com.datamate.knowledgegraph.infrastructure.client.DataManagementClient.WorkflowDTO;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.core.ParameterizedTypeReference;
|
||||
import org.springframework.http.HttpMethod;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.client.RestClientException;
|
||||
import org.springframework.web.client.RestTemplate;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.ArgumentMatchers.isNull;
|
||||
import static org.mockito.Mockito.argThat;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class DataManagementClientTest {
|
||||
|
||||
private static final String BASE_URL = "http://dm-service:8080";
|
||||
private static final String ANNOTATION_URL = "http://annotation-service:8081";
|
||||
private static final int PAGE_SIZE = 10;
|
||||
|
||||
@Mock
|
||||
private RestTemplate restTemplate;
|
||||
|
||||
private DataManagementClient client;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
KnowledgeGraphProperties properties = new KnowledgeGraphProperties();
|
||||
KnowledgeGraphProperties.Sync sync = new KnowledgeGraphProperties.Sync();
|
||||
sync.setDataManagementUrl(BASE_URL);
|
||||
sync.setAnnotationServiceUrl(ANNOTATION_URL);
|
||||
sync.setPageSize(PAGE_SIZE);
|
||||
properties.setSync(sync);
|
||||
|
||||
client = new DataManagementClient(restTemplate, properties);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Helper methods
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
private <T> PagedResult<T> pagedResult(List<T> content, int page, int totalPages) {
|
||||
PagedResult<T> result = new PagedResult<>();
|
||||
result.setContent(content);
|
||||
result.setPage(page);
|
||||
result.setTotalPages(totalPages);
|
||||
result.setTotalElements(content.size());
|
||||
return result;
|
||||
}
|
||||
|
||||
private DatasetDTO dataset(String id, LocalDateTime updatedAt) {
|
||||
DatasetDTO dto = new DatasetDTO();
|
||||
dto.setId(id);
|
||||
dto.setName("dataset-" + id);
|
||||
dto.setUpdatedAt(updatedAt);
|
||||
return dto;
|
||||
}
|
||||
|
||||
private WorkflowDTO workflow(String id, LocalDateTime updatedAt) {
|
||||
WorkflowDTO dto = new WorkflowDTO();
|
||||
dto.setId(id);
|
||||
dto.setName("workflow-" + id);
|
||||
dto.setUpdatedAt(updatedAt);
|
||||
return dto;
|
||||
}
|
||||
|
||||
private JobDTO job(String id, LocalDateTime updatedAt) {
|
||||
JobDTO dto = new JobDTO();
|
||||
dto.setId(id);
|
||||
dto.setName("job-" + id);
|
||||
dto.setUpdatedAt(updatedAt);
|
||||
return dto;
|
||||
}
|
||||
|
||||
private LabelTaskDTO labelTask(String id, LocalDateTime updatedAt) {
|
||||
LabelTaskDTO dto = new LabelTaskDTO();
|
||||
dto.setId(id);
|
||||
dto.setName("label-task-" + id);
|
||||
dto.setUpdatedAt(updatedAt);
|
||||
return dto;
|
||||
}
|
||||
|
||||
private KnowledgeSetDTO knowledgeSet(String id, LocalDateTime updatedAt) {
|
||||
KnowledgeSetDTO dto = new KnowledgeSetDTO();
|
||||
dto.setId(id);
|
||||
dto.setName("knowledge-set-" + id);
|
||||
dto.setUpdatedAt(updatedAt);
|
||||
return dto;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T> void stubSinglePageResponse(List<T> content) {
|
||||
PagedResult<T> paged = pagedResult(content, 0, 1);
|
||||
when(restTemplate.exchange(
|
||||
any(String.class), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class)))
|
||||
.thenReturn(ResponseEntity.ok(paged));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Time window URL parameter tests
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class TimeWindowUrlTests {
|
||||
|
||||
@Test
|
||||
void listAllDatasets_withTimeWindow_passesQueryParams() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 1, 1, 0, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 6, 30, 23, 59, 59);
|
||||
|
||||
DatasetDTO ds = dataset("ds-1", LocalDateTime.of(2025, 3, 15, 10, 0));
|
||||
stubSinglePageResponse(List.of(ds));
|
||||
|
||||
client.listAllDatasets(from, to);
|
||||
|
||||
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(restTemplate).exchange(
|
||||
urlCaptor.capture(), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class));
|
||||
|
||||
String url = urlCaptor.getValue();
|
||||
assertThat(url).contains("updatedFrom=2025-01-01T00%3A00%3A00");
|
||||
assertThat(url).contains("updatedTo=2025-06-30T23%3A59%3A59");
|
||||
assertThat(url).contains("page=0");
|
||||
assertThat(url).contains("size=" + PAGE_SIZE);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllDatasets_withOnlyUpdatedFrom_passesOnlyFromParam() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 3, 1, 0, 0, 0);
|
||||
|
||||
DatasetDTO ds = dataset("ds-1", LocalDateTime.of(2025, 5, 1, 12, 0));
|
||||
stubSinglePageResponse(List.of(ds));
|
||||
|
||||
client.listAllDatasets(from, null);
|
||||
|
||||
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(restTemplate).exchange(
|
||||
urlCaptor.capture(), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class));
|
||||
|
||||
String url = urlCaptor.getValue();
|
||||
assertThat(url).contains("updatedFrom=2025-03-01T00%3A00%3A00");
|
||||
assertThat(url).doesNotContain("updatedTo");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllDatasets_withOnlyUpdatedTo_passesOnlyToParam() {
|
||||
LocalDateTime to = LocalDateTime.of(2025, 12, 31, 23, 59, 59);
|
||||
|
||||
DatasetDTO ds = dataset("ds-1", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(ds));
|
||||
|
||||
client.listAllDatasets(null, to);
|
||||
|
||||
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(restTemplate).exchange(
|
||||
urlCaptor.capture(), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class));
|
||||
|
||||
String url = urlCaptor.getValue();
|
||||
assertThat(url).doesNotContain("updatedFrom");
|
||||
assertThat(url).contains("updatedTo=2025-12-31T23%3A59%3A59");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllDatasets_noTimeWindow_omitsTimeParams() {
|
||||
DatasetDTO ds = dataset("ds-1", LocalDateTime.of(2025, 1, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(ds));
|
||||
|
||||
client.listAllDatasets(null, null);
|
||||
|
||||
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(restTemplate).exchange(
|
||||
urlCaptor.capture(), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class));
|
||||
|
||||
String url = urlCaptor.getValue();
|
||||
assertThat(url).doesNotContain("updatedFrom");
|
||||
assertThat(url).doesNotContain("updatedTo");
|
||||
}
|
||||
|
||||
@Test
|
||||
void noArgOverload_delegatesToTimeWindowVersion_omitsTimeParams() {
|
||||
DatasetDTO ds = dataset("ds-1", LocalDateTime.of(2025, 1, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(ds));
|
||||
|
||||
client.listAllDatasets();
|
||||
|
||||
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(restTemplate).exchange(
|
||||
urlCaptor.capture(), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class));
|
||||
|
||||
String url = urlCaptor.getValue();
|
||||
assertThat(url).doesNotContain("updatedFrom");
|
||||
assertThat(url).doesNotContain("updatedTo");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Validation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class ValidationTests {
|
||||
|
||||
@Test
|
||||
void listAllDatasets_updatedFromAfterUpdatedTo_throwsIllegalArgument() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 12, 31, 23, 59, 59);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 1, 1, 0, 0, 0);
|
||||
|
||||
assertThatThrownBy(() -> client.listAllDatasets(from, to))
|
||||
.isInstanceOf(IllegalArgumentException.class)
|
||||
.hasMessageContaining("updatedFrom must be less than or equal to updatedTo");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllWorkflows_updatedFromAfterUpdatedTo_throwsIllegalArgument() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 1, 1, 0, 0);
|
||||
|
||||
assertThatThrownBy(() -> client.listAllWorkflows(from, to))
|
||||
.isInstanceOf(IllegalArgumentException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllJobs_updatedFromAfterUpdatedTo_throwsIllegalArgument() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 1, 1, 0, 0);
|
||||
|
||||
assertThatThrownBy(() -> client.listAllJobs(from, to))
|
||||
.isInstanceOf(IllegalArgumentException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllLabelTasks_updatedFromAfterUpdatedTo_throwsIllegalArgument() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 1, 1, 0, 0);
|
||||
|
||||
assertThatThrownBy(() -> client.listAllLabelTasks(from, to))
|
||||
.isInstanceOf(IllegalArgumentException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllKnowledgeSets_updatedFromAfterUpdatedTo_throwsIllegalArgument() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 1, 1, 0, 0);
|
||||
|
||||
assertThatThrownBy(() -> client.listAllKnowledgeSets(from, to))
|
||||
.isInstanceOf(IllegalArgumentException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllDatasets_sameFromAndTo_doesNotThrow() {
|
||||
LocalDateTime ts = LocalDateTime.of(2025, 6, 1, 12, 0, 0);
|
||||
|
||||
DatasetDTO ds = dataset("ds-1", ts);
|
||||
stubSinglePageResponse(List.of(ds));
|
||||
|
||||
List<DatasetDTO> result = client.listAllDatasets(ts, ts);
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Local filtering (client-side updatedAt filter)
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class LocalFilteringTests {
|
||||
|
||||
@Test
|
||||
void listAllDatasets_filtersItemsBeforeUpdatedFrom() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 12, 31, 23, 59, 59);
|
||||
|
||||
DatasetDTO old = dataset("ds-old", LocalDateTime.of(2025, 1, 1, 0, 0));
|
||||
DatasetDTO recent = dataset("ds-recent", LocalDateTime.of(2025, 7, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(old, recent));
|
||||
|
||||
List<DatasetDTO> result = client.listAllDatasets(from, to);
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
assertThat(result.get(0).getId()).isEqualTo("ds-recent");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllDatasets_filtersItemsAfterUpdatedTo() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 1, 1, 0, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 6, 30, 23, 59, 59);
|
||||
|
||||
DatasetDTO inRange = dataset("ds-in", LocalDateTime.of(2025, 3, 15, 10, 0));
|
||||
DatasetDTO tooNew = dataset("ds-new", LocalDateTime.of(2025, 9, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(inRange, tooNew));
|
||||
|
||||
List<DatasetDTO> result = client.listAllDatasets(from, to);
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
assertThat(result.get(0).getId()).isEqualTo("ds-in");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllDatasets_filtersItemsWithNullUpdatedAt() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 1, 1, 0, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 12, 31, 23, 59, 59);
|
||||
|
||||
DatasetDTO withTime = dataset("ds-with", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
DatasetDTO noTime = dataset("ds-null", null);
|
||||
stubSinglePageResponse(List.of(withTime, noTime));
|
||||
|
||||
List<DatasetDTO> result = client.listAllDatasets(from, to);
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
assertThat(result.get(0).getId()).isEqualTo("ds-with");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllDatasets_includesBoundaryValues() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 6, 30, 23, 59, 59);
|
||||
|
||||
DatasetDTO exactFrom = dataset("ds-from", from);
|
||||
DatasetDTO exactTo = dataset("ds-to", to);
|
||||
DatasetDTO inBetween = dataset("ds-mid", LocalDateTime.of(2025, 6, 15, 12, 0));
|
||||
stubSinglePageResponse(List.of(exactFrom, exactTo, inBetween));
|
||||
|
||||
List<DatasetDTO> result = client.listAllDatasets(from, to);
|
||||
|
||||
assertThat(result).hasSize(3);
|
||||
assertThat(result).extracting(DatasetDTO::getId)
|
||||
.containsExactly("ds-from", "ds-to", "ds-mid");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllDatasets_noTimeWindow_returnsAllItems() {
|
||||
DatasetDTO ds1 = dataset("ds-1", LocalDateTime.of(2020, 1, 1, 0, 0));
|
||||
DatasetDTO ds2 = dataset("ds-2", null);
|
||||
stubSinglePageResponse(List.of(ds1, ds2));
|
||||
|
||||
List<DatasetDTO> result = client.listAllDatasets(null, null);
|
||||
|
||||
assertThat(result).hasSize(2);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
void listAllDatasets_withNullItemInList_doesNotThrowNPE() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 1, 1, 0, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 12, 31, 23, 59, 59);
|
||||
|
||||
DatasetDTO valid = dataset("ds-valid", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
// Build a list that contains a null element to simulate upstream returning null items
|
||||
List<DatasetDTO> contentWithNull = new java.util.ArrayList<>();
|
||||
contentWithNull.add(valid);
|
||||
contentWithNull.add(null);
|
||||
contentWithNull.add(dataset("ds-old", LocalDateTime.of(2024, 1, 1, 0, 0)));
|
||||
|
||||
PagedResult<DatasetDTO> paged = pagedResult(contentWithNull, 0, 1);
|
||||
when(restTemplate.exchange(
|
||||
any(String.class), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class)))
|
||||
.thenReturn(ResponseEntity.ok(paged));
|
||||
|
||||
List<DatasetDTO> result = client.listAllDatasets(from, to);
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
assertThat(result.get(0).getId()).isEqualTo("ds-valid");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllWorkflows_filtersCorrectly() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0, 0);
|
||||
|
||||
WorkflowDTO old = workflow("wf-old", LocalDateTime.of(2025, 1, 1, 0, 0));
|
||||
WorkflowDTO recent = workflow("wf-recent", LocalDateTime.of(2025, 7, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(old, recent));
|
||||
|
||||
List<WorkflowDTO> result = client.listAllWorkflows(from, null);
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
assertThat(result.get(0).getId()).isEqualTo("wf-recent");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllJobs_filtersCorrectly() {
|
||||
LocalDateTime to = LocalDateTime.of(2025, 6, 30, 23, 59, 59);
|
||||
|
||||
JobDTO inRange = job("j-in", LocalDateTime.of(2025, 3, 1, 0, 0));
|
||||
JobDTO outOfRange = job("j-out", LocalDateTime.of(2025, 9, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(inRange, outOfRange));
|
||||
|
||||
List<JobDTO> result = client.listAllJobs(null, to);
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
assertThat(result.get(0).getId()).isEqualTo("j-in");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllLabelTasks_filtersCorrectly() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 6, 30, 23, 59, 59);
|
||||
|
||||
LabelTaskDTO inRange = labelTask("lt-in", LocalDateTime.of(2025, 6, 15, 0, 0));
|
||||
LabelTaskDTO outOfRange = labelTask("lt-out", LocalDateTime.of(2025, 1, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(inRange, outOfRange));
|
||||
|
||||
List<LabelTaskDTO> result = client.listAllLabelTasks(from, to);
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
assertThat(result.get(0).getId()).isEqualTo("lt-in");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllKnowledgeSets_filtersCorrectly() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0, 0);
|
||||
|
||||
KnowledgeSetDTO old = knowledgeSet("ks-old", LocalDateTime.of(2025, 1, 1, 0, 0));
|
||||
KnowledgeSetDTO recent = knowledgeSet("ks-new", LocalDateTime.of(2025, 8, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(old, recent));
|
||||
|
||||
List<KnowledgeSetDTO> result = client.listAllKnowledgeSets(from, null);
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
assertThat(result.get(0).getId()).isEqualTo("ks-new");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Pagination with time window
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class PaginationTests {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
void listAllDatasets_multiplePages_fetchesAllAndFilters() {
|
||||
LocalDateTime from = LocalDateTime.of(2025, 6, 1, 0, 0, 0);
|
||||
LocalDateTime to = LocalDateTime.of(2025, 12, 31, 23, 59, 59);
|
||||
|
||||
DatasetDTO ds1 = dataset("ds-1", LocalDateTime.of(2025, 7, 1, 0, 0));
|
||||
DatasetDTO ds2 = dataset("ds-2", LocalDateTime.of(2025, 3, 1, 0, 0)); // outside
|
||||
DatasetDTO ds3 = dataset("ds-3", LocalDateTime.of(2025, 9, 1, 0, 0));
|
||||
|
||||
PagedResult<DatasetDTO> page0 = pagedResult(List.of(ds1, ds2), 0, 2);
|
||||
PagedResult<DatasetDTO> page1 = pagedResult(List.of(ds3), 1, 2);
|
||||
|
||||
when(restTemplate.exchange(
|
||||
(String) argThat(url -> url != null && url.toString().contains("page=0")),
|
||||
eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class)))
|
||||
.thenReturn(ResponseEntity.ok(page0));
|
||||
|
||||
when(restTemplate.exchange(
|
||||
(String) argThat(url -> url != null && url.toString().contains("page=1")),
|
||||
eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class)))
|
||||
.thenReturn(ResponseEntity.ok(page1));
|
||||
|
||||
List<DatasetDTO> result = client.listAllDatasets(from, to);
|
||||
|
||||
// ds2 is outside time window, filtered out client-side
|
||||
assertThat(result).hasSize(2);
|
||||
assertThat(result).extracting(DatasetDTO::getId)
|
||||
.containsExactly("ds-1", "ds-3");
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
void listAllDatasets_emptyFirstPage_returnsEmptyList() {
|
||||
PagedResult<DatasetDTO> emptyPage = pagedResult(List.of(), 0, 0);
|
||||
when(restTemplate.exchange(
|
||||
any(String.class), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class)))
|
||||
.thenReturn(ResponseEntity.ok(emptyPage));
|
||||
|
||||
List<DatasetDTO> result = client.listAllDatasets(
|
||||
LocalDateTime.of(2025, 1, 1, 0, 0),
|
||||
LocalDateTime.of(2025, 12, 31, 23, 59, 59));
|
||||
|
||||
assertThat(result).isEmpty();
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Error propagation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class ErrorTests {
|
||||
|
||||
@Test
|
||||
void listAllDatasets_restClientException_propagates() {
|
||||
when(restTemplate.exchange(
|
||||
any(String.class), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class)))
|
||||
.thenThrow(new RestClientException("connection refused"));
|
||||
|
||||
assertThatThrownBy(() -> client.listAllDatasets(
|
||||
LocalDateTime.of(2025, 1, 1, 0, 0), null))
|
||||
.isInstanceOf(RestClientException.class)
|
||||
.hasMessageContaining("connection refused");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// URL format for each entity type
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class UrlEndpointTests {
|
||||
|
||||
@Test
|
||||
void listAllWorkflows_usesCorrectEndpoint() {
|
||||
WorkflowDTO wf = workflow("wf-1", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(wf));
|
||||
|
||||
client.listAllWorkflows(LocalDateTime.of(2025, 1, 1, 0, 0), null);
|
||||
|
||||
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(restTemplate).exchange(
|
||||
urlCaptor.capture(), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class));
|
||||
|
||||
assertThat(urlCaptor.getValue())
|
||||
.startsWith(BASE_URL + "/data-management/workflows?");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllJobs_usesCorrectEndpoint() {
|
||||
JobDTO j = job("j-1", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(j));
|
||||
|
||||
client.listAllJobs(LocalDateTime.of(2025, 1, 1, 0, 0), null);
|
||||
|
||||
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(restTemplate).exchange(
|
||||
urlCaptor.capture(), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class));
|
||||
|
||||
assertThat(urlCaptor.getValue())
|
||||
.startsWith(BASE_URL + "/data-management/jobs?");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllLabelTasks_usesAnnotationServiceUrl() {
|
||||
LabelTaskDTO lt = labelTask("lt-1", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(lt));
|
||||
|
||||
client.listAllLabelTasks(LocalDateTime.of(2025, 1, 1, 0, 0), null);
|
||||
|
||||
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(restTemplate).exchange(
|
||||
urlCaptor.capture(), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class));
|
||||
|
||||
assertThat(urlCaptor.getValue())
|
||||
.startsWith(ANNOTATION_URL + "/annotation/label-tasks?");
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllKnowledgeSets_usesCorrectEndpoint() {
|
||||
KnowledgeSetDTO ks = knowledgeSet("ks-1", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(ks));
|
||||
|
||||
client.listAllKnowledgeSets(LocalDateTime.of(2025, 1, 1, 0, 0), null);
|
||||
|
||||
ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(restTemplate).exchange(
|
||||
urlCaptor.capture(), eq(HttpMethod.GET), isNull(),
|
||||
any(ParameterizedTypeReference.class));
|
||||
|
||||
assertThat(urlCaptor.getValue())
|
||||
.startsWith(BASE_URL + "/data-management/knowledge-sets?");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// No-arg overloads (backward compatibility)
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class NoArgOverloadTests {
|
||||
|
||||
@Test
|
||||
void listAllWorkflows_noArgs_returnsAll() {
|
||||
WorkflowDTO wf = workflow("wf-1", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(wf));
|
||||
|
||||
List<WorkflowDTO> result = client.listAllWorkflows();
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllJobs_noArgs_returnsAll() {
|
||||
JobDTO j = job("j-1", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(j));
|
||||
|
||||
List<JobDTO> result = client.listAllJobs();
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllLabelTasks_noArgs_returnsAll() {
|
||||
LabelTaskDTO lt = labelTask("lt-1", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(lt));
|
||||
|
||||
List<LabelTaskDTO> result = client.listAllLabelTasks();
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listAllKnowledgeSets_noArgs_returnsAll() {
|
||||
KnowledgeSetDTO ks = knowledgeSet("ks-1", LocalDateTime.of(2025, 6, 1, 0, 0));
|
||||
stubSinglePageResponse(List.of(ks));
|
||||
|
||||
List<KnowledgeSetDTO> result = client.listAllKnowledgeSets();
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,130 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j;
|
||||
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.migration.SchemaMigrationService;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.boot.DefaultApplicationArguments;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThatCode;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.anyString;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class GraphInitializerTest {
|
||||
|
||||
@Mock
|
||||
private SchemaMigrationService schemaMigrationService;
|
||||
|
||||
private GraphInitializer createInitializer(String password, String profile, boolean autoInit) {
|
||||
KnowledgeGraphProperties properties = new KnowledgeGraphProperties();
|
||||
properties.getSync().setAutoInitSchema(autoInit);
|
||||
|
||||
GraphInitializer initializer = new GraphInitializer(properties, schemaMigrationService);
|
||||
ReflectionTestUtils.setField(initializer, "neo4jPassword", password);
|
||||
ReflectionTestUtils.setField(initializer, "activeProfile", profile);
|
||||
return initializer;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// P1-3: 默认凭据检测
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void run_defaultPassword_prodProfile_throwsException() {
|
||||
GraphInitializer initializer = createInitializer("datamate123", "prod", false);
|
||||
|
||||
assertThatThrownBy(() -> initializer.run(new DefaultApplicationArguments()))
|
||||
.isInstanceOf(IllegalStateException.class)
|
||||
.hasMessageContaining("SECURITY")
|
||||
.hasMessageContaining("default");
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_defaultPassword_stagingProfile_throwsException() {
|
||||
GraphInitializer initializer = createInitializer("neo4j", "staging", false);
|
||||
|
||||
assertThatThrownBy(() -> initializer.run(new DefaultApplicationArguments()))
|
||||
.isInstanceOf(IllegalStateException.class)
|
||||
.hasMessageContaining("SECURITY");
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_defaultPassword_devProfile_warnsButContinues() {
|
||||
GraphInitializer initializer = createInitializer("datamate123", "dev", false);
|
||||
|
||||
// Should not throw — just warn
|
||||
assertThatCode(() -> initializer.run(new DefaultApplicationArguments()))
|
||||
.doesNotThrowAnyException();
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_defaultPassword_testProfile_warnsButContinues() {
|
||||
GraphInitializer initializer = createInitializer("datamate123", "test", false);
|
||||
|
||||
assertThatCode(() -> initializer.run(new DefaultApplicationArguments()))
|
||||
.doesNotThrowAnyException();
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_defaultPassword_localProfile_warnsButContinues() {
|
||||
GraphInitializer initializer = createInitializer("password", "local", false);
|
||||
|
||||
assertThatCode(() -> initializer.run(new DefaultApplicationArguments()))
|
||||
.doesNotThrowAnyException();
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_securePassword_prodProfile_succeeds() {
|
||||
GraphInitializer initializer = createInitializer("s3cure!P@ssw0rd", "prod", false);
|
||||
|
||||
// Schema init disabled, so no queries. Should succeed.
|
||||
assertThatCode(() -> initializer.run(new DefaultApplicationArguments()))
|
||||
.doesNotThrowAnyException();
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_blankPassword_skipsValidation() {
|
||||
GraphInitializer initializer = createInitializer("", "prod", false);
|
||||
|
||||
assertThatCode(() -> initializer.run(new DefaultApplicationArguments()))
|
||||
.doesNotThrowAnyException();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Schema 初始化 — 委托给 SchemaMigrationService
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void run_autoInitEnabled_delegatesToMigrationService() {
|
||||
GraphInitializer initializer = createInitializer("s3cure!P@ss", "dev", true);
|
||||
|
||||
initializer.run(new DefaultApplicationArguments());
|
||||
|
||||
verify(schemaMigrationService).migrate(anyString());
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_autoInitDisabled_skipsSchemaInit() {
|
||||
GraphInitializer initializer = createInitializer("s3cure!P@ss", "dev", false);
|
||||
|
||||
initializer.run(new DefaultApplicationArguments());
|
||||
|
||||
verifyNoInteractions(schemaMigrationService);
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_migrationServiceThrows_propagatesException() {
|
||||
GraphInitializer initializer = createInitializer("s3cure!P@ss", "dev", true);
|
||||
|
||||
doThrow(new RuntimeException("Migration failed"))
|
||||
.when(schemaMigrationService).migrate(anyString());
|
||||
|
||||
assertThatThrownBy(() -> initializer.run(new DefaultApplicationArguments()))
|
||||
.isInstanceOf(RuntimeException.class)
|
||||
.hasMessageContaining("Migration failed");
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,578 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient.RecordFetchSpec;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient.RunnableSpec;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient.UnboundRunnableSpec;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class SchemaMigrationServiceTest {
|
||||
|
||||
@Mock
|
||||
private Neo4jClient neo4jClient;
|
||||
|
||||
private KnowledgeGraphProperties properties;
|
||||
|
||||
private SchemaMigration v1Migration;
|
||||
private SchemaMigration v2Migration;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
properties = new KnowledgeGraphProperties();
|
||||
|
||||
v1Migration = new SchemaMigration() {
|
||||
@Override
|
||||
public int getVersion() { return 1; }
|
||||
@Override
|
||||
public String getDescription() { return "Initial schema"; }
|
||||
@Override
|
||||
public List<String> getStatements() {
|
||||
return List.of("CREATE CONSTRAINT test1 IF NOT EXISTS FOR (n:Test) REQUIRE n.id IS UNIQUE");
|
||||
}
|
||||
};
|
||||
|
||||
v2Migration = new SchemaMigration() {
|
||||
@Override
|
||||
public int getVersion() { return 2; }
|
||||
@Override
|
||||
public String getDescription() { return "Add index"; }
|
||||
@Override
|
||||
public List<String> getStatements() {
|
||||
return List.of("CREATE INDEX test_name IF NOT EXISTS FOR (n:Test) ON (n.name)");
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private SchemaMigrationService createService(List<SchemaMigration> migrations) {
|
||||
return new SchemaMigrationService(neo4jClient, properties, migrations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a spy of the service with bootstrapMigrationSchema, acquireLock,
|
||||
* releaseLock, and recordMigration stubbed out, and loadAppliedMigrations
|
||||
* returning the given records.
|
||||
*/
|
||||
private SchemaMigrationService createSpiedService(List<SchemaMigration> migrations,
|
||||
List<SchemaMigrationRecord> applied) {
|
||||
SchemaMigrationService service = spy(createService(migrations));
|
||||
doNothing().when(service).bootstrapMigrationSchema();
|
||||
doNothing().when(service).acquireLock(anyString());
|
||||
doNothing().when(service).releaseLock(anyString());
|
||||
doReturn(applied).when(service).loadAppliedMigrations();
|
||||
lenient().doNothing().when(service).recordMigration(any());
|
||||
return service;
|
||||
}
|
||||
|
||||
private void setupQueryRunnable() {
|
||||
UnboundRunnableSpec spec = mock(UnboundRunnableSpec.class);
|
||||
when(neo4jClient.query(anyString())).thenReturn(spec);
|
||||
}
|
||||
|
||||
private SchemaMigrationRecord appliedRecord(SchemaMigration migration) {
|
||||
return SchemaMigrationRecord.builder()
|
||||
.version(migration.getVersion())
|
||||
.description(migration.getDescription())
|
||||
.checksum(SchemaMigrationService.computeChecksum(migration.getStatements()))
|
||||
.appliedAt("2025-01-01T00:00:00Z")
|
||||
.executionTimeMs(100L)
|
||||
.success(true)
|
||||
.statementsCount(migration.getStatements().size())
|
||||
.build();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Migration Disabled
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class MigrationDisabled {
|
||||
|
||||
@Test
|
||||
void migrate_whenDisabled_skipsEverything() {
|
||||
properties.getMigration().setEnabled(false);
|
||||
SchemaMigrationService service = createService(List.of(v1Migration));
|
||||
|
||||
service.migrate("test-instance");
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Fresh Database
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class FreshDatabase {
|
||||
|
||||
@Test
|
||||
void migrate_freshDb_appliesAllMigrations() {
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), Collections.emptyList());
|
||||
setupQueryRunnable();
|
||||
|
||||
service.migrate("test-instance");
|
||||
|
||||
// Verify migration statement was executed
|
||||
verify(neo4jClient).query(contains("test1"));
|
||||
// Verify migration record was created
|
||||
verify(service).recordMigration(argThat(r -> r.getVersion() == 1 && r.isSuccess()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void migrate_freshDb_bootstrapConstraintsCreated() {
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), Collections.emptyList());
|
||||
setupQueryRunnable();
|
||||
|
||||
service.migrate("test-instance");
|
||||
|
||||
// Verify bootstrap, lock acquisition, and release were called
|
||||
verify(service).bootstrapMigrationSchema();
|
||||
verify(service).acquireLock("test-instance");
|
||||
verify(service).releaseLock("test-instance");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Partially Applied
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class PartiallyApplied {
|
||||
|
||||
@Test
|
||||
void migrate_v1Applied_onlyExecutesPending() {
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration, v2Migration), List.of(appliedRecord(v1Migration)));
|
||||
setupQueryRunnable();
|
||||
|
||||
service.migrate("test-instance");
|
||||
|
||||
// V1 statement should NOT be executed
|
||||
verify(neo4jClient, never()).query(contains("test1"));
|
||||
// V2 statement should be executed
|
||||
verify(neo4jClient).query(contains("test_name"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void migrate_allApplied_noop() {
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), List.of(appliedRecord(v1Migration)));
|
||||
|
||||
service.migrate("test-instance");
|
||||
|
||||
// No migration statements should be executed
|
||||
verifyNoInteractions(neo4jClient);
|
||||
// recordMigration should NOT be called (only the stubbed setup, no real call)
|
||||
verify(service, never()).recordMigration(any());
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Checksum Validation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class ChecksumValidation {
|
||||
|
||||
@Test
|
||||
void migrate_checksumMismatch_throwsException() {
|
||||
SchemaMigrationRecord tampered = SchemaMigrationRecord.builder()
|
||||
.version(1)
|
||||
.description("Initial schema")
|
||||
.checksum("wrong-checksum")
|
||||
.appliedAt("2025-01-01T00:00:00Z")
|
||||
.executionTimeMs(100L)
|
||||
.success(true)
|
||||
.statementsCount(1)
|
||||
.build();
|
||||
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), List.of(tampered));
|
||||
|
||||
assertThatThrownBy(() -> service.migrate("test-instance"))
|
||||
.isInstanceOf(BusinessException.class)
|
||||
.satisfies(e -> assertThat(((BusinessException) e).getErrorCodeEnum())
|
||||
.isEqualTo(KnowledgeGraphErrorCode.SCHEMA_CHECKSUM_MISMATCH));
|
||||
}
|
||||
|
||||
@Test
|
||||
void migrate_checksumValidationDisabled_skipsCheck() {
|
||||
properties.getMigration().setValidateChecksums(false);
|
||||
|
||||
SchemaMigrationRecord tampered = SchemaMigrationRecord.builder()
|
||||
.version(1)
|
||||
.description("Initial schema")
|
||||
.checksum("wrong-checksum")
|
||||
.appliedAt("2025-01-01T00:00:00Z")
|
||||
.executionTimeMs(100L)
|
||||
.success(true)
|
||||
.statementsCount(1)
|
||||
.build();
|
||||
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), List.of(tampered));
|
||||
|
||||
// Should NOT throw even with wrong checksum — all applied, no pending
|
||||
assertThatCode(() -> service.migrate("test-instance"))
|
||||
.doesNotThrowAnyException();
|
||||
}
|
||||
|
||||
@Test
|
||||
void migrate_emptyChecksum_skipsValidation() {
|
||||
SchemaMigrationRecord legacyRecord = SchemaMigrationRecord.builder()
|
||||
.version(1)
|
||||
.description("Initial schema")
|
||||
.checksum("") // empty checksum from legacy/repaired node
|
||||
.appliedAt("")
|
||||
.executionTimeMs(0L)
|
||||
.success(true)
|
||||
.statementsCount(0)
|
||||
.build();
|
||||
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), List.of(legacyRecord));
|
||||
|
||||
// Should NOT throw — empty checksum is skipped, and V1 is treated as applied
|
||||
assertThatCode(() -> service.migrate("test-instance"))
|
||||
.doesNotThrowAnyException();
|
||||
|
||||
// V1 should NOT be re-executed (it's in the applied set)
|
||||
verify(neo4jClient, never()).query(contains("test1"));
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Lock Management
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class LockManagement {
|
||||
|
||||
@Test
|
||||
void migrate_lockAcquired_executesAndReleases() {
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), Collections.emptyList());
|
||||
setupQueryRunnable();
|
||||
|
||||
service.migrate("test-instance");
|
||||
|
||||
var inOrder = inOrder(service);
|
||||
inOrder.verify(service).acquireLock("test-instance");
|
||||
inOrder.verify(service).releaseLock("test-instance");
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
void migrate_lockHeldByAnother_throwsException() {
|
||||
SchemaMigrationService service = spy(createService(List.of(v1Migration)));
|
||||
doNothing().when(service).bootstrapMigrationSchema();
|
||||
|
||||
// Let acquireLock run for real — mock neo4jClient for lock query
|
||||
UnboundRunnableSpec lockSpec = mock(UnboundRunnableSpec.class);
|
||||
RunnableSpec runnableSpec = mock(RunnableSpec.class);
|
||||
RecordFetchSpec<Map<String, Object>> fetchSpec = mock(RecordFetchSpec.class);
|
||||
|
||||
when(neo4jClient.query(contains("MERGE (lock:_SchemaLock"))).thenReturn(lockSpec);
|
||||
when(lockSpec.bindAll(anyMap())).thenReturn(runnableSpec);
|
||||
when(runnableSpec.fetch()).thenReturn(fetchSpec);
|
||||
when(fetchSpec.first()).thenReturn(Optional.of(Map.of(
|
||||
"lockedBy", "other-instance",
|
||||
"canAcquire", false
|
||||
)));
|
||||
|
||||
assertThatThrownBy(() -> service.migrate("test-instance"))
|
||||
.isInstanceOf(BusinessException.class)
|
||||
.satisfies(e -> assertThat(((BusinessException) e).getErrorCodeEnum())
|
||||
.isEqualTo(KnowledgeGraphErrorCode.SCHEMA_MIGRATION_LOCKED));
|
||||
}
|
||||
|
||||
@Test
|
||||
void migrate_lockReleasedOnFailure() {
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), Collections.emptyList());
|
||||
|
||||
// Make migration statement fail
|
||||
UnboundRunnableSpec failSpec = mock(UnboundRunnableSpec.class);
|
||||
when(neo4jClient.query(anyString())).thenReturn(failSpec);
|
||||
doThrow(new RuntimeException("Connection refused"))
|
||||
.when(failSpec).run();
|
||||
|
||||
assertThatThrownBy(() -> service.migrate("test-instance"))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
// Lock should still be released even after failure
|
||||
verify(service).releaseLock("test-instance");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Migration Failure
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class MigrationFailure {
|
||||
|
||||
@Test
|
||||
void migrate_statementFails_recordsFailureAndThrows() {
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), Collections.emptyList());
|
||||
|
||||
// Make migration statement fail
|
||||
UnboundRunnableSpec failSpec = mock(UnboundRunnableSpec.class);
|
||||
when(neo4jClient.query(anyString())).thenReturn(failSpec);
|
||||
doThrow(new RuntimeException("Connection refused"))
|
||||
.when(failSpec).run();
|
||||
|
||||
assertThatThrownBy(() -> service.migrate("test-instance"))
|
||||
.isInstanceOf(BusinessException.class)
|
||||
.satisfies(e -> assertThat(((BusinessException) e).getErrorCodeEnum())
|
||||
.isEqualTo(KnowledgeGraphErrorCode.SCHEMA_MIGRATION_FAILED));
|
||||
|
||||
// Failure should be recorded
|
||||
verify(service).recordMigration(argThat(r -> !r.isSuccess()
|
||||
&& r.getErrorMessage() != null
|
||||
&& r.getErrorMessage().contains("Connection refused")));
|
||||
}
|
||||
|
||||
@Test
|
||||
void migrate_alreadyExistsError_safelySkipped() {
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), Collections.emptyList());
|
||||
|
||||
// Make migration statement throw "already exists"
|
||||
UnboundRunnableSpec existsSpec = mock(UnboundRunnableSpec.class);
|
||||
when(neo4jClient.query(anyString())).thenReturn(existsSpec);
|
||||
doThrow(new RuntimeException("Constraint already exists"))
|
||||
.when(existsSpec).run();
|
||||
|
||||
// Should not throw
|
||||
assertThatCode(() -> service.migrate("test-instance"))
|
||||
.doesNotThrowAnyException();
|
||||
|
||||
// Success should be recorded
|
||||
verify(service).recordMigration(argThat(r -> r.isSuccess() && r.getVersion() == 1));
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Retry After Failure (P0)
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class RetryAfterFailure {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
void recordMigration_usesMerge_allowsRetryAfterFailure() {
|
||||
SchemaMigrationService service = createService(List.of(v1Migration));
|
||||
|
||||
UnboundRunnableSpec unboundSpec = mock(UnboundRunnableSpec.class);
|
||||
RunnableSpec runnableSpec = mock(RunnableSpec.class);
|
||||
when(neo4jClient.query(contains("MERGE"))).thenReturn(unboundSpec);
|
||||
when(unboundSpec.bindAll(anyMap())).thenReturn(runnableSpec);
|
||||
|
||||
SchemaMigrationRecord record = SchemaMigrationRecord.builder()
|
||||
.version(1)
|
||||
.description("test")
|
||||
.checksum("abc123")
|
||||
.appliedAt("2025-01-01T00:00:00Z")
|
||||
.executionTimeMs(100L)
|
||||
.success(true)
|
||||
.statementsCount(1)
|
||||
.build();
|
||||
|
||||
service.recordMigration(record);
|
||||
|
||||
// Verify MERGE is used (not CREATE) — ensures retries update
|
||||
// existing failed records instead of hitting unique constraint violations
|
||||
verify(neo4jClient).query(contains("MERGE"));
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
@Test
|
||||
void recordMigration_nullErrorMessage_boundAsEmptyString() {
|
||||
SchemaMigrationService service = createService(List.of(v1Migration));
|
||||
|
||||
UnboundRunnableSpec unboundSpec = mock(UnboundRunnableSpec.class);
|
||||
RunnableSpec runnableSpec = mock(RunnableSpec.class);
|
||||
when(neo4jClient.query(contains("MERGE"))).thenReturn(unboundSpec);
|
||||
when(unboundSpec.bindAll(anyMap())).thenReturn(runnableSpec);
|
||||
|
||||
SchemaMigrationRecord record = SchemaMigrationRecord.builder()
|
||||
.version(1)
|
||||
.description("test")
|
||||
.checksum("abc123")
|
||||
.appliedAt("2025-01-01T00:00:00Z")
|
||||
.executionTimeMs(100L)
|
||||
.success(true)
|
||||
.statementsCount(1)
|
||||
// errorMessage intentionally not set (null)
|
||||
.build();
|
||||
|
||||
service.recordMigration(record);
|
||||
|
||||
ArgumentCaptor<Map> paramsCaptor = ArgumentCaptor.forClass(Map.class);
|
||||
verify(unboundSpec).bindAll(paramsCaptor.capture());
|
||||
Map<String, Object> params = paramsCaptor.getValue();
|
||||
|
||||
// All String params must be non-null to avoid Neo4j driver issues
|
||||
assertThat(params.get("errorMessage")).isEqualTo("");
|
||||
assertThat(params.get("description")).isEqualTo("test");
|
||||
assertThat(params.get("checksum")).isEqualTo("abc123");
|
||||
assertThat(params.get("appliedAt")).isEqualTo("2025-01-01T00:00:00Z");
|
||||
}
|
||||
|
||||
@Test
|
||||
void migrate_retryAfterFailure_recordsSuccess() {
|
||||
// Simulate: first run recorded a failure, second run should succeed.
|
||||
// loadAppliedMigrations only returns success=true, so failed V1 won't be in applied set.
|
||||
SchemaMigrationService service = createSpiedService(
|
||||
List.of(v1Migration), Collections.emptyList());
|
||||
setupQueryRunnable();
|
||||
|
||||
service.migrate("test-instance");
|
||||
|
||||
// Verify success record is written (MERGE will update existing failed record)
|
||||
verify(service).recordMigration(argThat(r -> r.isSuccess() && r.getVersion() == 1));
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Database Time for Lock (P1-1)
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class DatabaseTimeLock {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
void acquireLock_usesDatabaseTime_notLocalTime() {
|
||||
SchemaMigrationService service = createService(List.of(v1Migration));
|
||||
|
||||
UnboundRunnableSpec lockSpec = mock(UnboundRunnableSpec.class);
|
||||
RunnableSpec runnableSpec = mock(RunnableSpec.class);
|
||||
RecordFetchSpec<Map<String, Object>> fetchSpec = mock(RecordFetchSpec.class);
|
||||
|
||||
when(neo4jClient.query(contains("MERGE (lock:_SchemaLock"))).thenReturn(lockSpec);
|
||||
when(lockSpec.bindAll(anyMap())).thenReturn(runnableSpec);
|
||||
when(runnableSpec.fetch()).thenReturn(fetchSpec);
|
||||
when(fetchSpec.first()).thenReturn(Optional.of(Map.of(
|
||||
"lockedBy", "test-instance",
|
||||
"canAcquire", true
|
||||
)));
|
||||
|
||||
service.acquireLock("test-instance");
|
||||
|
||||
// Verify that local time is NOT passed as parameters — database time is used instead
|
||||
@SuppressWarnings("rawtypes")
|
||||
ArgumentCaptor<Map> paramsCaptor = ArgumentCaptor.forClass(Map.class);
|
||||
verify(lockSpec).bindAll(paramsCaptor.capture());
|
||||
Map<String, Object> params = paramsCaptor.getValue();
|
||||
assertThat(params).containsKey("instanceId");
|
||||
assertThat(params).containsKey("timeoutMs");
|
||||
assertThat(params).doesNotContainKey("now");
|
||||
assertThat(params).doesNotContainKey("expiry");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Checksum Computation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class ChecksumComputation {
|
||||
|
||||
@Test
|
||||
void computeChecksum_deterministic() {
|
||||
List<String> statements = List.of("stmt1", "stmt2");
|
||||
String checksum1 = SchemaMigrationService.computeChecksum(statements);
|
||||
String checksum2 = SchemaMigrationService.computeChecksum(statements);
|
||||
|
||||
assertThat(checksum1).isEqualTo(checksum2);
|
||||
assertThat(checksum1).hasSize(64); // SHA-256 hex length
|
||||
}
|
||||
|
||||
@Test
|
||||
void computeChecksum_orderMatters() {
|
||||
String checksum1 = SchemaMigrationService.computeChecksum(List.of("stmt1", "stmt2"));
|
||||
String checksum2 = SchemaMigrationService.computeChecksum(List.of("stmt2", "stmt1"));
|
||||
|
||||
assertThat(checksum1).isNotEqualTo(checksum2);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Bootstrap Repair
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class BootstrapRepair {
|
||||
|
||||
@Test
|
||||
void bootstrapMigrationSchema_executesRepairQuery() {
|
||||
SchemaMigrationService service = createService(List.of(v1Migration));
|
||||
|
||||
UnboundRunnableSpec spec = mock(UnboundRunnableSpec.class);
|
||||
when(neo4jClient.query(anyString())).thenReturn(spec);
|
||||
|
||||
service.bootstrapMigrationSchema();
|
||||
|
||||
// Verify 3 queries: 2 constraints + 1 repair
|
||||
verify(neo4jClient, times(3)).query(anyString());
|
||||
// Verify repair query targets nodes with missing properties
|
||||
verify(neo4jClient).query(contains("m.description IS NULL OR m.checksum IS NULL"));
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Load Applied Migrations Query
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class LoadAppliedMigrationsQuery {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
void loadAppliedMigrations_usesCoalesceInQuery() {
|
||||
SchemaMigrationService service = createService(List.of(v1Migration));
|
||||
|
||||
UnboundRunnableSpec spec = mock(UnboundRunnableSpec.class);
|
||||
RecordFetchSpec<Map<String, Object>> fetchSpec = mock(RecordFetchSpec.class);
|
||||
when(neo4jClient.query(contains("COALESCE"))).thenReturn(spec);
|
||||
when(spec.fetch()).thenReturn(fetchSpec);
|
||||
when(fetchSpec.all()).thenReturn(Collections.emptyList());
|
||||
|
||||
service.loadAppliedMigrations();
|
||||
|
||||
// Verify COALESCE is used for all optional properties
|
||||
ArgumentCaptor<String> queryCaptor = ArgumentCaptor.forClass(String.class);
|
||||
verify(neo4jClient).query(queryCaptor.capture());
|
||||
String capturedQuery = queryCaptor.getValue();
|
||||
assertThat(capturedQuery)
|
||||
.contains("COALESCE(m.description, '')")
|
||||
.contains("COALESCE(m.checksum, '')")
|
||||
.contains("COALESCE(m.applied_at, '')")
|
||||
.contains("COALESCE(m.execution_time_ms, 0)")
|
||||
.contains("COALESCE(m.statements_count, 0)")
|
||||
.contains("COALESCE(m.error_message, '')");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class V2__PerformanceIndexesTest {
|
||||
|
||||
private final V2__PerformanceIndexes migration = new V2__PerformanceIndexes();
|
||||
|
||||
@Test
|
||||
void version_is_2() {
|
||||
assertThat(migration.getVersion()).isEqualTo(2);
|
||||
}
|
||||
|
||||
@Test
|
||||
void description_is_not_empty() {
|
||||
assertThat(migration.getDescription()).isNotBlank();
|
||||
}
|
||||
|
||||
@Test
|
||||
void statements_are_not_empty() {
|
||||
List<String> statements = migration.getStatements();
|
||||
assertThat(statements).isNotEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void all_statements_use_if_not_exists() {
|
||||
for (String stmt : migration.getStatements()) {
|
||||
assertThat(stmt).containsIgnoringCase("IF NOT EXISTS");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void contains_relationship_index() {
|
||||
List<String> statements = migration.getStatements();
|
||||
boolean hasRelIndex = statements.stream()
|
||||
.anyMatch(s -> s.contains("RELATED_TO") && s.contains("graph_id"));
|
||||
assertThat(hasRelIndex).isTrue();
|
||||
}
|
||||
|
||||
@Test
|
||||
void contains_updated_at_index() {
|
||||
List<String> statements = migration.getStatements();
|
||||
boolean hasUpdatedAt = statements.stream()
|
||||
.anyMatch(s -> s.contains("updated_at"));
|
||||
assertThat(hasUpdatedAt).isTrue();
|
||||
}
|
||||
|
||||
@Test
|
||||
void contains_composite_graph_id_name_index() {
|
||||
List<String> statements = migration.getStatements();
|
||||
boolean hasComposite = statements.stream()
|
||||
.anyMatch(s -> s.contains("graph_id") && s.contains("n.name"));
|
||||
assertThat(hasComposite).isTrue();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,152 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.security;
|
||||
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import jakarta.servlet.http.HttpServletResponse;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.mock.web.MockHttpServletRequest;
|
||||
import org.springframework.mock.web.MockHttpServletResponse;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class InternalTokenInterceptorTest {
|
||||
|
||||
private static final String VALID_TOKEN = "test-secret-token";
|
||||
|
||||
private KnowledgeGraphProperties properties;
|
||||
private InternalTokenInterceptor interceptor;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
properties = new KnowledgeGraphProperties();
|
||||
interceptor = new InternalTokenInterceptor(properties, new ObjectMapper());
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// fail-closed:Token 未配置 + skipTokenCheck=false → 拒绝
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void tokenNotConfigured_skipFalse_rejects() throws Exception {
|
||||
properties.getSecurity().setInternalToken(null);
|
||||
properties.getSecurity().setSkipTokenCheck(false);
|
||||
|
||||
MockHttpServletRequest request = new MockHttpServletRequest();
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
|
||||
boolean result = interceptor.preHandle(request, response, new Object());
|
||||
|
||||
assertThat(result).isFalse();
|
||||
assertThat(response.getStatus()).isEqualTo(HttpServletResponse.SC_UNAUTHORIZED);
|
||||
assertThat(response.getContentAsString()).contains("knowledge_graph.0013");
|
||||
}
|
||||
|
||||
@Test
|
||||
void tokenEmpty_skipFalse_rejects() throws Exception {
|
||||
properties.getSecurity().setInternalToken("");
|
||||
properties.getSecurity().setSkipTokenCheck(false);
|
||||
|
||||
MockHttpServletRequest request = new MockHttpServletRequest();
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
|
||||
boolean result = interceptor.preHandle(request, response, new Object());
|
||||
|
||||
assertThat(result).isFalse();
|
||||
assertThat(response.getStatus()).isEqualTo(HttpServletResponse.SC_UNAUTHORIZED);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// dev/test 放行:Token 未配置 + skipTokenCheck=true → 放行
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void tokenNotConfigured_skipTrue_allows() throws Exception {
|
||||
properties.getSecurity().setInternalToken(null);
|
||||
properties.getSecurity().setSkipTokenCheck(true);
|
||||
|
||||
MockHttpServletRequest request = new MockHttpServletRequest();
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
|
||||
boolean result = interceptor.preHandle(request, response, new Object());
|
||||
|
||||
assertThat(result).isTrue();
|
||||
assertThat(response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 正常校验:Token 已配置 + 请求头匹配 → 放行
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void validToken_allows() throws Exception {
|
||||
properties.getSecurity().setInternalToken(VALID_TOKEN);
|
||||
|
||||
MockHttpServletRequest request = new MockHttpServletRequest();
|
||||
request.addHeader("X-Internal-Token", VALID_TOKEN);
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
|
||||
boolean result = interceptor.preHandle(request, response, new Object());
|
||||
|
||||
assertThat(result).isTrue();
|
||||
assertThat(response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 401:Token 已配置 + 请求头不匹配 → 拒绝
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void invalidToken_rejects() throws Exception {
|
||||
properties.getSecurity().setInternalToken(VALID_TOKEN);
|
||||
|
||||
MockHttpServletRequest request = new MockHttpServletRequest();
|
||||
request.addHeader("X-Internal-Token", "wrong-token");
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
|
||||
boolean result = interceptor.preHandle(request, response, new Object());
|
||||
|
||||
assertThat(result).isFalse();
|
||||
assertThat(response.getStatus()).isEqualTo(HttpServletResponse.SC_UNAUTHORIZED);
|
||||
assertThat(response.getContentType()).startsWith("application/json");
|
||||
assertThat(response.getContentAsString()).contains("knowledge_graph.0013");
|
||||
}
|
||||
|
||||
@Test
|
||||
void missingTokenHeader_rejects() throws Exception {
|
||||
properties.getSecurity().setInternalToken(VALID_TOKEN);
|
||||
|
||||
MockHttpServletRequest request = new MockHttpServletRequest();
|
||||
// No X-Internal-Token header
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
|
||||
boolean result = interceptor.preHandle(request, response, new Object());
|
||||
|
||||
assertThat(result).isFalse();
|
||||
assertThat(response.getStatus()).isEqualTo(HttpServletResponse.SC_UNAUTHORIZED);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 错误响应格式:应使用 Response 体系
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void errorResponse_usesResponseFormat() throws Exception {
|
||||
properties.getSecurity().setInternalToken(VALID_TOKEN);
|
||||
|
||||
MockHttpServletRequest request = new MockHttpServletRequest();
|
||||
request.addHeader("X-Internal-Token", "wrong");
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
|
||||
interceptor.preHandle(request, response, new Object());
|
||||
|
||||
String body = response.getContentAsString();
|
||||
assertThat(body).contains("\"code\"");
|
||||
assertThat(body).contains("\"message\"");
|
||||
// Response.error() 包含 data 字段(值为 null)
|
||||
assertThat(body).contains("\"data\"");
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,239 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.application.EditReviewService;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.EditReviewVO;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.http.MediaType;
|
||||
import org.springframework.test.web.servlet.MockMvc;
|
||||
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.mockito.ArgumentMatchers.*;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class EditReviewControllerTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String REVIEW_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
private static final String ENTITY_ID = "770e8400-e29b-41d4-a716-446655440002";
|
||||
|
||||
@Mock
|
||||
private EditReviewService reviewService;
|
||||
|
||||
@InjectMocks
|
||||
private EditReviewController controller;
|
||||
|
||||
private MockMvc mockMvc;
|
||||
private ObjectMapper objectMapper;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
mockMvc = MockMvcBuilders.standaloneSetup(controller).build();
|
||||
objectMapper = new ObjectMapper();
|
||||
objectMapper.registerModule(new JavaTimeModule());
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// POST /knowledge-graph/{graphId}/review/submit
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void submitReview_success() throws Exception {
|
||||
EditReviewVO vo = buildReviewVO("PENDING");
|
||||
when(reviewService.submitReview(eq(GRAPH_ID), any(), eq("user-1")))
|
||||
.thenReturn(vo);
|
||||
|
||||
mockMvc.perform(post("/knowledge-graph/{graphId}/review/submit", GRAPH_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.header("X-User-Id", "user-1")
|
||||
.content(objectMapper.writeValueAsString(Map.of(
|
||||
"operationType", "CREATE_ENTITY",
|
||||
"payload", "{\"name\":\"Test\",\"type\":\"Dataset\"}"
|
||||
))))
|
||||
.andExpect(status().isCreated())
|
||||
.andExpect(jsonPath("$.id").value(REVIEW_ID))
|
||||
.andExpect(jsonPath("$.status").value("PENDING"))
|
||||
.andExpect(jsonPath("$.operationType").value("CREATE_ENTITY"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void submitReview_delegatesToService() throws Exception {
|
||||
EditReviewVO vo = buildReviewVO("PENDING");
|
||||
when(reviewService.submitReview(eq(GRAPH_ID), any(), eq("user-1")))
|
||||
.thenReturn(vo);
|
||||
|
||||
mockMvc.perform(post("/knowledge-graph/{graphId}/review/submit", GRAPH_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.header("X-User-Id", "user-1")
|
||||
.content(objectMapper.writeValueAsString(Map.of(
|
||||
"operationType", "DELETE_ENTITY",
|
||||
"entityId", ENTITY_ID
|
||||
))))
|
||||
.andExpect(status().isCreated());
|
||||
|
||||
verify(reviewService).submitReview(eq(GRAPH_ID), any(), eq("user-1"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void submitReview_defaultUserId_whenHeaderMissing() throws Exception {
|
||||
EditReviewVO vo = buildReviewVO("PENDING");
|
||||
when(reviewService.submitReview(eq(GRAPH_ID), any(), eq("anonymous")))
|
||||
.thenReturn(vo);
|
||||
|
||||
mockMvc.perform(post("/knowledge-graph/{graphId}/review/submit", GRAPH_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(objectMapper.writeValueAsString(Map.of(
|
||||
"operationType", "CREATE_ENTITY",
|
||||
"payload", "{\"name\":\"Test\"}"
|
||||
))))
|
||||
.andExpect(status().isCreated());
|
||||
|
||||
verify(reviewService).submitReview(eq(GRAPH_ID), any(), eq("anonymous"));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// POST /knowledge-graph/{graphId}/review/{reviewId}/approve
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void approveReview_success() throws Exception {
|
||||
EditReviewVO vo = buildReviewVO("APPROVED");
|
||||
when(reviewService.approveReview(eq(GRAPH_ID), eq(REVIEW_ID), eq("reviewer-1"), isNull()))
|
||||
.thenReturn(vo);
|
||||
|
||||
mockMvc.perform(post("/knowledge-graph/{graphId}/review/{reviewId}/approve", GRAPH_ID, REVIEW_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.header("X-User-Id", "reviewer-1"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.status").value("APPROVED"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_withComment() throws Exception {
|
||||
EditReviewVO vo = buildReviewVO("APPROVED");
|
||||
when(reviewService.approveReview(eq(GRAPH_ID), eq(REVIEW_ID), eq("reviewer-1"), eq("LGTM")))
|
||||
.thenReturn(vo);
|
||||
|
||||
mockMvc.perform(post("/knowledge-graph/{graphId}/review/{reviewId}/approve", GRAPH_ID, REVIEW_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.header("X-User-Id", "reviewer-1")
|
||||
.content(objectMapper.writeValueAsString(Map.of("comment", "LGTM"))))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(reviewService).approveReview(GRAPH_ID, REVIEW_ID, "reviewer-1", "LGTM");
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// POST /knowledge-graph/{graphId}/review/{reviewId}/reject
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void rejectReview_success() throws Exception {
|
||||
EditReviewVO vo = buildReviewVO("REJECTED");
|
||||
when(reviewService.rejectReview(eq(GRAPH_ID), eq(REVIEW_ID), eq("reviewer-1"), eq("不合适")))
|
||||
.thenReturn(vo);
|
||||
|
||||
mockMvc.perform(post("/knowledge-graph/{graphId}/review/{reviewId}/reject", GRAPH_ID, REVIEW_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.header("X-User-Id", "reviewer-1")
|
||||
.content(objectMapper.writeValueAsString(Map.of("comment", "不合适"))))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.status").value("REJECTED"));
|
||||
|
||||
verify(reviewService).rejectReview(GRAPH_ID, REVIEW_ID, "reviewer-1", "不合适");
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// GET /knowledge-graph/{graphId}/review/pending
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listPendingReviews_success() throws Exception {
|
||||
EditReviewVO vo = buildReviewVO("PENDING");
|
||||
PagedResponse<EditReviewVO> page = PagedResponse.of(List.of(vo), 0, 1, 1);
|
||||
when(reviewService.listPendingReviews(GRAPH_ID, 0, 20)).thenReturn(page);
|
||||
|
||||
mockMvc.perform(get("/knowledge-graph/{graphId}/review/pending", GRAPH_ID))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.content").isArray())
|
||||
.andExpect(jsonPath("$.content[0].id").value(REVIEW_ID))
|
||||
.andExpect(jsonPath("$.totalElements").value(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
void listPendingReviews_customPageSize() throws Exception {
|
||||
PagedResponse<EditReviewVO> page = PagedResponse.of(List.of(), 0, 0, 0);
|
||||
when(reviewService.listPendingReviews(GRAPH_ID, 1, 10)).thenReturn(page);
|
||||
|
||||
mockMvc.perform(get("/knowledge-graph/{graphId}/review/pending", GRAPH_ID)
|
||||
.param("page", "1")
|
||||
.param("size", "10"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(reviewService).listPendingReviews(GRAPH_ID, 1, 10);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// GET /knowledge-graph/{graphId}/review
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listReviews_withStatusFilter() throws Exception {
|
||||
PagedResponse<EditReviewVO> page = PagedResponse.of(List.of(), 0, 0, 0);
|
||||
when(reviewService.listReviews(GRAPH_ID, "APPROVED", 0, 20)).thenReturn(page);
|
||||
|
||||
mockMvc.perform(get("/knowledge-graph/{graphId}/review", GRAPH_ID)
|
||||
.param("status", "APPROVED"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.content").isEmpty());
|
||||
|
||||
verify(reviewService).listReviews(GRAPH_ID, "APPROVED", 0, 20);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listReviews_withoutStatusFilter() throws Exception {
|
||||
EditReviewVO vo = buildReviewVO("PENDING");
|
||||
PagedResponse<EditReviewVO> page = PagedResponse.of(List.of(vo), 0, 1, 1);
|
||||
when(reviewService.listReviews(GRAPH_ID, null, 0, 20)).thenReturn(page);
|
||||
|
||||
mockMvc.perform(get("/knowledge-graph/{graphId}/review", GRAPH_ID))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.content").isArray())
|
||||
.andExpect(jsonPath("$.content[0].id").value(REVIEW_ID));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Helpers
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
private EditReviewVO buildReviewVO(String status) {
|
||||
return EditReviewVO.builder()
|
||||
.id(REVIEW_ID)
|
||||
.graphId(GRAPH_ID)
|
||||
.operationType("CREATE_ENTITY")
|
||||
.payload("{\"name\":\"Test\",\"type\":\"Dataset\"}")
|
||||
.status(status)
|
||||
.submittedBy("user-1")
|
||||
.createdAt(LocalDateTime.now())
|
||||
.build();
|
||||
}
|
||||
}
|
||||
@@ -81,6 +81,11 @@
|
||||
<artifactId>data-evaluation-service</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.datamate</groupId>
|
||||
<artifactId>task-coordination-service</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.datamate</groupId>
|
||||
<artifactId>pipeline-orchestration-service</artifactId>
|
||||
@@ -104,6 +109,13 @@
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- 知识图谱服务依赖 -->
|
||||
<dependency>
|
||||
<groupId>com.datamate</groupId>
|
||||
<artifactId>knowledge-graph-service</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Database -->
|
||||
<dependency>
|
||||
<groupId>com.mysql</groupId>
|
||||
|
||||
@@ -7,8 +7,14 @@ import org.springframework.security.config.annotation.web.configuration.EnableWe
|
||||
import org.springframework.security.web.SecurityFilterChain;
|
||||
|
||||
/**
|
||||
* 安全配置 - 暂时禁用所有认证
|
||||
* 开发阶段使用,生产环境需要启用认证
|
||||
* Spring Security 配置。
|
||||
* <p>
|
||||
* 安全架构采用双层防护:
|
||||
* <ul>
|
||||
* <li><b>Gateway 层</b>:API Gateway 负责 JWT 校验,通过后透传 X-User-* headers 到后端服务</li>
|
||||
* <li><b>服务层</b>:内部 sync 端点通过 {@code InternalTokenInterceptor} 校验 X-Internal-Token</li>
|
||||
* </ul>
|
||||
* 当前 SecurityFilterChain 配置为 permitAll,HTTP 级别的访问控制由 Gateway 和业务拦截器共同完成。
|
||||
*/
|
||||
@Configuration
|
||||
@EnableWebSecurity
|
||||
|
||||
@@ -3,12 +3,6 @@ spring:
|
||||
application:
|
||||
name: datamate
|
||||
|
||||
# 暂时排除Spring Security自动配置(开发阶段使用)
|
||||
autoconfigure:
|
||||
exclude:
|
||||
- org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration
|
||||
- org.springframework.boot.autoconfigure.security.servlet.UserDetailsServiceAutoConfiguration
|
||||
|
||||
# 数据源配置
|
||||
datasource:
|
||||
driver-class-name: com.mysql.cj.jdbc.Driver
|
||||
@@ -52,6 +46,7 @@ spring:
|
||||
import:
|
||||
- classpath:config/application-datacollection.yml
|
||||
- classpath:config/application-datamanagement.yml
|
||||
- optional:classpath:application-knowledgegraph.yml
|
||||
|
||||
# Redis配置
|
||||
data:
|
||||
|
||||
@@ -29,6 +29,7 @@
|
||||
<module>data-synthesis-service</module>
|
||||
<module>data-annotation-service</module>
|
||||
<module>data-evaluation-service</module>
|
||||
<module>task-coordination-service</module>
|
||||
<module>pipeline-orchestration-service</module>
|
||||
<module>execution-engine-service</module>
|
||||
|
||||
@@ -36,6 +37,9 @@
|
||||
<module>rag-indexer-service</module>
|
||||
<module>rag-query-service</module>
|
||||
|
||||
<!-- 知识图谱服务 -->
|
||||
<module>knowledge-graph-service</module>
|
||||
|
||||
<!-- 主启动模块 -->
|
||||
<module>main-application</module>
|
||||
</modules>
|
||||
|
||||
48
backend/services/task-coordination-service/pom.xml
Normal file
48
backend/services/task-coordination-service/pom.xml
Normal file
@@ -0,0 +1,48 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven module descriptor for the task coordination service
     (task splitting, assignment and progress aggregation). -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>com.datamate</groupId>
        <artifactId>services</artifactId>
        <version>1.0.0-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>

    <artifactId>task-coordination-service</artifactId>
    <name>Task Coordination Service</name>
    <description>任务拆分与分配协调服务</description>

    <dependencies>
        <!-- Shared domain primitives -->
        <dependency>
            <groupId>com.datamate</groupId>
            <artifactId>domain-common</artifactId>
            <version>${project.version}</version>
        </dependency>

        <!-- Web layer -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

        <!-- Compile-time code generation -->
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>
        <dependency>
            <groupId>org.mapstruct</groupId>
            <artifactId>mapstruct</artifactId>
        </dependency>
        <dependency>
            <groupId>org.mapstruct</groupId>
            <artifactId>mapstruct-processor</artifactId>
            <version>${mapstruct.version}</version>
            <scope>provided</scope>
        </dependency>

        <!-- Pagination / sorting types -->
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-commons</artifactId>
        </dependency>
    </dependencies>
</project>
|
||||
@@ -0,0 +1,13 @@
|
||||
package com.datamate.coordination;
|
||||
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
|
||||
/**
|
||||
* 任务协调服务配置类
|
||||
* 提供任务拆分、分配和进度聚合功能
|
||||
*/
|
||||
@ComponentScan(basePackages = {
|
||||
"com.datamate.coordination"
|
||||
})
|
||||
public class TaskCoordinationServiceConfiguration {
|
||||
}
|
||||
@@ -0,0 +1,113 @@
|
||||
package com.datamate.coordination.application;

import com.datamate.coordination.common.enums.AssignmentActionEnum;
import com.datamate.coordination.common.exception.TaskCoordinationErrorCode;
import com.datamate.coordination.domain.repository.TaskAssignmentLogRepository;
import com.datamate.coordination.domain.repository.TaskMetaRepository;
import com.datamate.coordination.interfaces.dto.*;
import com.datamate.common.auth.infrastructure.context.RequestUserContextHolder;
import com.datamate.common.infrastructure.exception.BusinessException;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.List;
import java.util.UUID;

/**
 * Application service for assigning, reassigning and revoking tasks.
 * <p>
 * Every mutation updates the task's {@code assignedTo} field and appends an
 * audit entry to the assignment log, so the full assignment history of a task
 * can be reconstructed via {@link #getAssignmentLogs(String)}.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class TaskAssignmentService {

    private final TaskMetaRepository taskMetaRepo;

    private final TaskAssignmentLogRepository assignmentLogRepo;

    /**
     * Assigns a task to the user in {@code request}.
     *
     * @throws BusinessException with {@code TASK_NOT_FOUND} if the task does not exist
     */
    @Transactional
    public void assignTask(String taskMetaId, AssignTaskRequest request) {
        // doAssign validates existence itself; the previous extra lookup here was redundant.
        doAssign(taskMetaId, request.getUserId(), AssignmentActionEnum.ASSIGN,
                getCurrentUser(), request.getRemark());
    }

    /**
     * Reassigns a task to a new user. The previous assignee is captured in the
     * audit log entry written by {@link #doAssign}.
     *
     * @throws BusinessException with {@code TASK_NOT_FOUND} if the task does not exist
     */
    @Transactional
    public void reassignTask(String taskMetaId, AssignTaskRequest request) {
        doAssign(taskMetaId, request.getUserId(), AssignmentActionEnum.REASSIGN,
                getCurrentUser(), request.getRemark());
    }

    /**
     * Clears a task's assignee and records a REVOKE audit entry.
     *
     * @throws BusinessException with {@code TASK_NOT_FOUND} if the task does not exist
     */
    @Transactional
    public void revokeTask(String taskMetaId, String remark) {
        TaskMetaDto dto = requireTask(taskMetaId);
        Long previousUserId = dto.getAssignedTo();

        dto.setAssignedTo(null);
        taskMetaRepo.update(dto);

        writeLog(taskMetaId, AssignmentActionEnum.REVOKE, previousUserId, null,
                getCurrentUser(), remark);
    }

    /**
     * Assigns multiple tasks in one transaction; fails (and rolls back) on the
     * first task that does not exist.
     */
    @Transactional
    public void batchAssign(BatchAssignRequest request) {
        String operator = getCurrentUser();
        for (BatchAssignRequest.TaskAssignment assignment : request.getAssignments()) {
            doAssign(assignment.getTaskMetaId(), assignment.getUserId(),
                    AssignmentActionEnum.ASSIGN, operator, assignment.getRemark());
        }
    }

    /** Returns the full assignment audit trail for a task. */
    public List<TaskAssignmentLogDto> getAssignmentLogs(String taskMetaId) {
        return assignmentLogRepo.findByTaskMetaId(taskMetaId);
    }

    /**
     * Performs the assignment and writes the audit log entry
     * (internal method, also used by TaskSplitService).
     *
     * @throws BusinessException with {@code TASK_NOT_FOUND} if the task does not exist
     */
    void doAssign(String taskMetaId, Long userId, AssignmentActionEnum action,
                  String operatedBy, String remark) {
        TaskMetaDto dto = requireTask(taskMetaId);

        Long previousUserId = dto.getAssignedTo();
        dto.setAssignedTo(userId);
        taskMetaRepo.update(dto);

        writeLog(taskMetaId, action, previousUserId, userId, operatedBy, remark);
    }

    /** Builds and persists one assignment audit log entry. */
    private void writeLog(String taskMetaId, AssignmentActionEnum action,
                          Long fromUserId, Long toUserId, String operatedBy, String remark) {
        TaskAssignmentLogDto logDto = new TaskAssignmentLogDto();
        logDto.setId(UUID.randomUUID().toString());
        logDto.setTaskMetaId(taskMetaId);
        logDto.setAction(action);
        logDto.setFromUserId(fromUserId);
        logDto.setToUserId(toUserId);
        logDto.setOperatedBy(operatedBy);
        logDto.setRemark(remark);
        assignmentLogRepo.insert(logDto);
    }

    /** Loads a task or throws {@code TASK_NOT_FOUND}. */
    private TaskMetaDto requireTask(String id) {
        TaskMetaDto dto = taskMetaRepo.findById(id);
        if (dto == null) {
            throw BusinessException.of(TaskCoordinationErrorCode.TASK_NOT_FOUND);
        }
        return dto;
    }

    /** Current user ID from the request context, or "system" when absent. */
    private String getCurrentUser() {
        String userId = RequestUserContextHolder.getCurrentUserId();
        return userId != null ? userId : "system";
    }
}
|
||||
@@ -0,0 +1,171 @@
|
||||
package com.datamate.coordination.application;

import com.datamate.coordination.common.enums.TaskMetaStatusEnum;
import com.datamate.coordination.common.exception.TaskCoordinationErrorCode;
import com.datamate.coordination.domain.repository.TaskMetaRepository;
import com.datamate.coordination.interfaces.dto.AssigneeInfo;
import com.datamate.coordination.interfaces.dto.CreateTaskMetaRequest;
import com.datamate.coordination.interfaces.dto.TaskMetaDto;
import com.datamate.common.auth.domain.model.AuthUserAccount;
import com.datamate.common.auth.infrastructure.context.RequestUserContextHolder;
import com.datamate.common.auth.infrastructure.persistence.mapper.AuthMapper;
import com.datamate.common.infrastructure.exception.BusinessException;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.*;
import java.util.stream.Collectors;

/**
 * CRUD and query service for task metadata.
 * <p>
 * Query methods enrich results with assignee display information (looked up
 * via {@link AuthMapper}) and, for top-level tasks, with the number of child tasks.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class TaskMetaService {

    private final TaskMetaRepository taskMetaRepo;

    private final AuthMapper authMapper;

    /**
     * Creates a new task meta record in PENDING state with progress counters
     * zeroed; {@code totalItems} and {@code priority} default to 0 when absent.
     */
    @Transactional
    public TaskMetaDto createTaskMeta(CreateTaskMetaRequest request) {
        TaskMetaDto dto = new TaskMetaDto();
        dto.setId(UUID.randomUUID().toString());
        dto.setModule(request.getModule());
        dto.setRefTaskId(request.getRefTaskId());
        dto.setTaskName(request.getTaskName());
        dto.setStatus(TaskMetaStatusEnum.PENDING);
        dto.setAssignedTo(request.getAssignedTo());
        dto.setCreatedBy(getCurrentUser());
        dto.setProgress(0);
        dto.setTotalItems(request.getTotalItems() != null ? request.getTotalItems() : 0);
        dto.setCompletedItems(0);
        dto.setFailedItems(0);
        dto.setPriority(request.getPriority() != null ? request.getPriority() : 0);
        dto.setDeadline(request.getDeadline());
        taskMetaRepo.insert(dto);
        return dto;
    }

    /**
     * Loads a single task with child count and assignee info populated.
     *
     * @throws BusinessException with {@code TASK_NOT_FOUND} if the task does not exist
     */
    public TaskMetaDto getTaskMeta(String id) {
        TaskMetaDto dto = taskMetaRepo.findById(id);
        if (dto == null) {
            throw BusinessException.of(TaskCoordinationErrorCode.TASK_NOT_FOUND);
        }
        dto.setChildCount(taskMetaRepo.countByParentId(id));
        populateAssigneeInfo(List.of(dto));
        return dto;
    }

    /**
     * Returns the children of a parent task.
     * <p>
     * NOTE(review): {@code page}/{@code size} are accepted but not applied —
     * {@code findByParentId} returns all children. TODO: push pagination into
     * the repository query so large families are not fully materialized.
     *
     * @throws BusinessException with {@code PARENT_TASK_NOT_FOUND} if the parent does not exist
     */
    public List<TaskMetaDto> getChildren(String parentId, Integer page, Integer size) {
        if (!taskMetaRepo.existsById(parentId)) {
            throw BusinessException.of(TaskCoordinationErrorCode.PARENT_TASK_NOT_FOUND);
        }
        List<TaskMetaDto> children = taskMetaRepo.findByParentId(parentId);
        populateAssigneeInfo(children);
        return children;
    }

    /** Number of direct children of the given parent task. */
    public int countChildren(String parentId) {
        return taskMetaRepo.countByParentId(parentId);
    }

    /** Tasks assigned to the current user, filtered by status/module. */
    public List<TaskMetaDto> getMyTasks(String status, String module,
                                        Integer page, Integer size) {
        Long currentUserId = getCurrentUserIdAsLong();
        List<TaskMetaDto> tasks = taskMetaRepo.findTasks(module, status, currentUserId, null, null, page, size);
        populateAssigneeInfo(tasks);
        return tasks;
    }

    /** Count counterpart of {@link #getMyTasks}. */
    public int countMyTasks(String status, String module) {
        Long currentUserId = getCurrentUserIdAsLong();
        return taskMetaRepo.countTasks(module, status, currentUserId, null, null);
    }

    /** General task search with assignee info and top-level child counts populated. */
    public List<TaskMetaDto> findTasks(String module, String status, Long assignedTo,
                                       String keyword, Integer page, Integer size) {
        List<TaskMetaDto> tasks = taskMetaRepo.findTasks(module, status, assignedTo, null, keyword, page, size);
        populateAssigneeInfo(tasks);
        enrichChildCount(tasks);
        return tasks;
    }

    /** Count counterpart of {@link #findTasks}. */
    public int countTasks(String module, String status, Long assignedTo, String keyword) {
        return taskMetaRepo.countTasks(module, status, assignedTo, null, keyword);
    }

    /**
     * Deletes a task meta record.
     *
     * @throws BusinessException with {@code TASK_NOT_FOUND} if the task does not exist
     */
    @Transactional
    public void deleteTaskMeta(String id) {
        if (taskMetaRepo.findById(id) == null) {
            throw BusinessException.of(TaskCoordinationErrorCode.TASK_NOT_FOUND);
        }
        taskMetaRepo.deleteById(id);
    }

    // ── Assignee population ──────────────────────────────

    /**
     * Fills in assignee display info for the given tasks.
     * <p>
     * User IDs are deduplicated via a Set, but each distinct user is still
     * fetched with an individual {@code findUserById} call — this is one query
     * per distinct assignee, not a single batch query. Consider adding a
     * batch lookup to {@link AuthMapper} if this becomes a hot path.
     */
    private void populateAssigneeInfo(List<TaskMetaDto> tasks) {
        if (tasks == null || tasks.isEmpty()) {
            return;
        }

        Set<Long> userIds = tasks.stream()
                .map(TaskMetaDto::getAssignedTo)
                .filter(Objects::nonNull)
                .collect(Collectors.toSet());

        if (userIds.isEmpty()) {
            return;
        }

        Map<Long, AssigneeInfo> userMap = new HashMap<>();
        for (Long userId : userIds) {
            AuthUserAccount user = authMapper.findUserById(userId);
            if (user != null) {
                AssigneeInfo info = new AssigneeInfo();
                info.setId(user.getId());
                info.setUsername(user.getUsername());
                info.setFullName(user.getFullName());
                userMap.put(userId, info);
            }
        }

        for (TaskMetaDto task : tasks) {
            if (task.getAssignedTo() != null) {
                task.setAssignee(userMap.get(task.getAssignedTo()));
            }
        }
    }

    /**
     * Fills in childCount for top-level tasks (those without a parent).
     * One count query per top-level task.
     */
    private void enrichChildCount(List<TaskMetaDto> tasks) {
        for (TaskMetaDto task : tasks) {
            if (task.getParentId() == null) {
                task.setChildCount(taskMetaRepo.countByParentId(task.getId()));
            }
        }
    }

    // ── Helpers ──────────────────────────────────────────

    /** Current user ID from the request context, or "system" when absent. */
    private String getCurrentUser() {
        String userId = RequestUserContextHolder.getCurrentUserId();
        return userId != null ? userId : "system";
    }

    /** Current user ID parsed as Long; null when absent or not numeric. */
    private Long getCurrentUserIdAsLong() {
        String userId = RequestUserContextHolder.getCurrentUserId();
        if (userId == null) {
            return null;
        }
        try {
            return Long.parseLong(userId);
        } catch (NumberFormatException e) {
            log.warn("Cannot parse current user ID to Long: {}", userId);
            return null;
        }
    }
}
|
||||
@@ -0,0 +1,217 @@
|
||||
package com.datamate.coordination.application;

import com.datamate.coordination.common.enums.TaskMetaStatusEnum;
import com.datamate.coordination.common.exception.TaskCoordinationErrorCode;
import com.datamate.coordination.domain.repository.TaskMetaRepository;
import com.datamate.coordination.interfaces.dto.*;
import com.datamate.common.auth.domain.model.AuthUserAccount;
import com.datamate.common.auth.infrastructure.persistence.mapper.AuthMapper;
import com.datamate.common.infrastructure.exception.BusinessException;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Computes and updates task progress.
 * <p>
 * For parent tasks, progress is the aggregate of all children's item counters;
 * the parent's status is also derived from the children's statuses whenever a
 * child's progress changes (see {@link #aggregateParentProgress(String)}).
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class TaskProgressService {

    private final TaskMetaRepository taskMetaRepo;

    private final AuthMapper authMapper;

    /**
     * Returns the progress view for a task: its own counters when it has no
     * children, otherwise counters aggregated across all children plus a
     * per-child breakdown (with assignee info).
     *
     * @throws BusinessException with {@code TASK_NOT_FOUND} if the task does not exist
     */
    public TaskProgressResponse getProgress(String taskMetaId) {
        TaskMetaDto parent = taskMetaRepo.findById(taskMetaId);
        if (parent == null) {
            throw BusinessException.of(TaskCoordinationErrorCode.TASK_NOT_FOUND);
        }

        TaskProgressResponse response = new TaskProgressResponse();
        response.setTaskId(parent.getId());
        response.setTaskName(parent.getTaskName());
        response.setStatus(parent.getStatus());

        List<TaskMetaDto> children = taskMetaRepo.findByParentId(taskMetaId);
        if (children.isEmpty()) {
            // Leaf task: report its own counters as-is.
            response.setOverallProgress(parent.getProgress());
            response.setTotalItems(parent.getTotalItems());
            response.setCompletedItems(parent.getCompletedItems());
            response.setFailedItems(parent.getFailedItems());
            response.setChildren(List.of());
            return response;
        }

        // Assignee display info, fetched once per distinct user.
        Map<Long, AssigneeInfo> userMap = buildAssigneeMap(children);

        // Single pass: accumulate totals while building the per-child DTOs
        // (the previous implementation iterated the children twice).
        int totalItems = 0;
        int completedItems = 0;
        int failedItems = 0;
        List<ChildTaskProgressDto> childDtos = new ArrayList<>(children.size());

        for (TaskMetaDto child : children) {
            totalItems += (child.getTotalItems() != null ? child.getTotalItems() : 0);
            completedItems += (child.getCompletedItems() != null ? child.getCompletedItems() : 0);
            failedItems += (child.getFailedItems() != null ? child.getFailedItems() : 0);

            ChildTaskProgressDto dto = new ChildTaskProgressDto();
            dto.setTaskId(child.getId());
            dto.setTaskName(child.getTaskName());
            dto.setProgress(child.getProgress());
            dto.setTotalItems(child.getTotalItems());
            dto.setCompletedItems(child.getCompletedItems());
            dto.setFailedItems(child.getFailedItems());
            dto.setStatus(child.getStatus());
            if (child.getAssignedTo() != null) {
                dto.setAssignee(userMap.get(child.getAssignedTo()));
            }
            childDtos.add(dto);
        }

        int overallProgress = totalItems > 0
                ? (int) Math.round((double) completedItems / totalItems * 100)
                : 0;

        response.setOverallProgress(overallProgress);
        response.setTotalItems(totalItems);
        response.setCompletedItems(completedItems);
        response.setFailedItems(failedItems);
        response.setChildren(childDtos);
        return response;
    }

    /**
     * Applies a partial progress update to a task; only non-null request fields
     * are written. Status transitions stamp startedAt / completedAt, and child
     * updates trigger re-aggregation of the parent.
     *
     * @throws BusinessException with {@code TASK_NOT_FOUND} if the task does not exist
     */
    @Transactional
    public void updateProgress(String taskMetaId, UpdateProgressRequest request) {
        TaskMetaDto dto = taskMetaRepo.findById(taskMetaId);
        if (dto == null) {
            throw BusinessException.of(TaskCoordinationErrorCode.TASK_NOT_FOUND);
        }

        if (request.getProgress() != null) {
            dto.setProgress(request.getProgress());
        }
        if (request.getTotalItems() != null) {
            dto.setTotalItems(request.getTotalItems());
        }
        if (request.getCompletedItems() != null) {
            dto.setCompletedItems(request.getCompletedItems());
        }
        if (request.getFailedItems() != null) {
            dto.setFailedItems(request.getFailedItems());
        }
        if (request.getStatus() != null) {
            TaskMetaStatusEnum newStatus = TaskMetaStatusEnum.fromValue(request.getStatus());
            dto.setStatus(newStatus);

            // First transition into IN_PROGRESS stamps the start time.
            if (newStatus == TaskMetaStatusEnum.IN_PROGRESS && dto.getStartedAt() == null) {
                dto.setStartedAt(LocalDateTime.now());
            }
            if (newStatus == TaskMetaStatusEnum.COMPLETED || newStatus == TaskMetaStatusEnum.FAILED) {
                dto.setCompletedAt(LocalDateTime.now());
            }
        }

        taskMetaRepo.update(dto);

        // Child update: re-derive the parent's counters and status.
        if (dto.getParentId() != null) {
            aggregateParentProgress(dto.getParentId());
        }
    }

    /**
     * Re-derives a parent task's counters, progress and status from all of its
     * children. Status rules, in order of precedence:
     * all COMPLETED → COMPLETED; any IN_PROGRESS → IN_PROGRESS;
     * all terminated with a failure → FAILED; all terminated otherwise → STOPPED.
     */
    void aggregateParentProgress(String parentId) {
        TaskMetaDto parent = taskMetaRepo.findById(parentId);
        if (parent == null) {
            return;
        }

        List<TaskMetaDto> children = taskMetaRepo.findByParentId(parentId);
        if (children.isEmpty()) {
            return;
        }

        int totalItems = 0;
        int completedItems = 0;
        int failedItems = 0;
        boolean anyInProgress = false;
        boolean allCompleted = true;
        boolean allTerminated = true; // completed, failed, stopped, cancelled
        boolean anyFailed = false;

        for (TaskMetaDto child : children) {
            totalItems += (child.getTotalItems() != null ? child.getTotalItems() : 0);
            completedItems += (child.getCompletedItems() != null ? child.getCompletedItems() : 0);
            failedItems += (child.getFailedItems() != null ? child.getFailedItems() : 0);

            TaskMetaStatusEnum s = child.getStatus();
            if (s == TaskMetaStatusEnum.IN_PROGRESS) {
                anyInProgress = true;
            }
            if (s != TaskMetaStatusEnum.COMPLETED) {
                allCompleted = false;
            }
            if (s == TaskMetaStatusEnum.FAILED) {
                anyFailed = true;
            }
            if (s == TaskMetaStatusEnum.PENDING || s == TaskMetaStatusEnum.IN_PROGRESS) {
                allTerminated = false;
            }
        }

        parent.setTotalItems(totalItems);
        parent.setCompletedItems(completedItems);
        parent.setFailedItems(failedItems);
        parent.setProgress(totalItems > 0
                ? (int) Math.round((double) completedItems / totalItems * 100)
                : 0);

        // Status aggregation.
        if (allCompleted) {
            parent.setStatus(TaskMetaStatusEnum.COMPLETED);
            parent.setCompletedAt(LocalDateTime.now());
        } else if (anyInProgress) {
            parent.setStatus(TaskMetaStatusEnum.IN_PROGRESS);
            if (parent.getStartedAt() == null) {
                parent.setStartedAt(LocalDateTime.now());
            }
        } else if (allTerminated && anyFailed) {
            parent.setStatus(TaskMetaStatusEnum.FAILED);
            parent.setCompletedAt(LocalDateTime.now());
        } else if (allTerminated) {
            // NOTE(review): STOPPED intentionally does not stamp completedAt here,
            // matching the original behavior — confirm whether that is desired.
            parent.setStatus(TaskMetaStatusEnum.STOPPED);
        }

        taskMetaRepo.update(parent);
    }

    /**
     * Builds an assignee lookup map for the given tasks. User IDs are
     * deduplicated, but each distinct user is fetched individually.
     */
    private Map<Long, AssigneeInfo> buildAssigneeMap(List<TaskMetaDto> tasks) {
        Set<Long> userIds = tasks.stream()
                .map(TaskMetaDto::getAssignedTo)
                .filter(Objects::nonNull)
                .collect(Collectors.toSet());

        Map<Long, AssigneeInfo> map = new HashMap<>();
        for (Long userId : userIds) {
            AuthUserAccount user = authMapper.findUserById(userId);
            if (user != null) {
                AssigneeInfo info = new AssigneeInfo();
                info.setId(user.getId());
                info.setUsername(user.getUsername());
                info.setFullName(user.getFullName());
                map.put(userId, info);
            }
        }
        return map;
    }
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user