You've already forked DataMate
Compare commits
87 Commits
f381d641ab
...
lsf
| Author | SHA1 | Date | |
|---|---|---|---|
| 75f9b95093 | |||
| ca37bc5a3b | |||
| e62a8369d4 | |||
| 6de41f1a5b | |||
| 24e59b87f2 | |||
| 1b2ed5335e | |||
| a5d8997c22 | |||
| e9e4cf3b1c | |||
| 9800517378 | |||
| 3a9afe3480 | |||
| afcb8783aa | |||
| 9b6ff59a11 | |||
| 39338df808 | |||
| 0ed7dcbee7 | |||
| 7abdafc338 | |||
| cca463e7d1 | |||
| 20446bf57d | |||
| 444f8cd015 | |||
| f12e4abd83 | |||
| 42069f82b3 | |||
| 74daed1c25 | |||
| 75db6daeb5 | |||
| ebb4548ca5 | |||
| 37b478a052 | |||
| a260134d7c | |||
| 8b1ab8ff36 | |||
| 910251e898 | |||
| 0e0782a452 | |||
| 5a553ddde3 | |||
| 8f21798d57 | |||
| f707ce9dae | |||
| 9988ff00f5 | |||
| 2fbfefdb91 | |||
| dc490f03be | |||
| 49f99527cc | |||
| 06a7cd9abd | |||
| ea7ca5474e | |||
| 8ffa131fad | |||
| 807c2289e2 | |||
| 7d5a809772 | |||
| 2f8645a011 | |||
| 71f8f7d1c3 | |||
| 78624915b7 | |||
| 2f49fc4199 | |||
| 9efc07935f | |||
| 7264e111ae | |||
| 3dd4035005 | |||
| 36b410ba7b | |||
| 329382db47 | |||
| e862925a06 | |||
| 05752678cc | |||
| 0f1dd9ec8d | |||
| 38add27d84 | |||
| f9f4ea352e | |||
| 24d8ee49a1 | |||
| 38e58ba864 | |||
| cd5f5ef6da | |||
| 1f6c821cbc | |||
| 44a1f2193f | |||
| 6a4c4ae3d7 | |||
| c6dccf5e29 | |||
| fbc83b5610 | |||
| 056cee11cc | |||
| f8f9faaa06 | |||
| 719f54bf2e | |||
| 5507adeb45 | |||
| 48cf49d064 | |||
| f5cb265667 | |||
| 4143bc75f9 | |||
| 99bd83d312 | |||
| c03bdf1a24 | |||
| 9057807ec1 | |||
| f15fd044ce | |||
| d0972cbc9d | |||
| 473f4e717f | |||
| 6b0042cb66 | |||
| fa9e9d9f68 | |||
| 707e65b017 | |||
| cda22a720c | |||
| 394e2bda18 | |||
| 4220284f5a | |||
| 8415166949 | |||
| 078f303f57 | |||
| 50f2da5503 | |||
| 3af1daf8b6 | |||
| 7c7729434b | |||
| 17a62cd3c2 |
54
Makefile
54
Makefile
@@ -76,6 +76,12 @@ help:
|
||||
@echo " make download SAVE=true PLATFORM=linux/arm64 Save ARM64 images"
|
||||
@echo " make load-images Load all downloaded images from dist/"
|
||||
@echo ""
|
||||
@echo "Neo4j Commands:"
|
||||
@echo " make neo4j-up Start Neo4j graph database"
|
||||
@echo " make neo4j-down Stop Neo4j graph database"
|
||||
@echo " make neo4j-logs View Neo4j logs"
|
||||
@echo " make neo4j-shell Open Neo4j Cypher shell"
|
||||
@echo ""
|
||||
@echo "Utility Commands:"
|
||||
@echo " make create-namespace Create Kubernetes namespace"
|
||||
@echo " make help Show this help message"
|
||||
@@ -205,8 +211,9 @@ endif
|
||||
.PHONY: install
|
||||
install:
|
||||
ifeq ($(origin INSTALLER), undefined)
|
||||
$(call prompt-installer,datamate-$$INSTALLER-install milvus-$$INSTALLER-install)
|
||||
$(call prompt-installer,neo4j-$$INSTALLER-install datamate-$$INSTALLER-install milvus-$$INSTALLER-install)
|
||||
else
|
||||
$(MAKE) neo4j-$(INSTALLER)-install
|
||||
$(MAKE) datamate-$(INSTALLER)-install
|
||||
$(MAKE) milvus-$(INSTALLER)-install
|
||||
endif
|
||||
@@ -222,7 +229,7 @@ endif
|
||||
.PHONY: uninstall
|
||||
uninstall:
|
||||
ifeq ($(origin INSTALLER), undefined)
|
||||
$(call prompt-uninstaller,label-studio-$$INSTALLER-uninstall milvus-$$INSTALLER-uninstall deer-flow-$$INSTALLER-uninstall datamate-$$INSTALLER-uninstall)
|
||||
$(call prompt-uninstaller,label-studio-$$INSTALLER-uninstall milvus-$$INSTALLER-uninstall neo4j-$$INSTALLER-uninstall deer-flow-$$INSTALLER-uninstall datamate-$$INSTALLER-uninstall)
|
||||
else
|
||||
@if [ "$(INSTALLER)" = "docker" ]; then \
|
||||
echo "Delete volumes? (This will remove all data)"; \
|
||||
@@ -234,6 +241,7 @@ else
|
||||
fi
|
||||
@$(MAKE) label-studio-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE; \
|
||||
$(MAKE) milvus-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE; \
|
||||
$(MAKE) neo4j-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE; \
|
||||
$(MAKE) deer-flow-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE; \
|
||||
$(MAKE) datamate-$(INSTALLER)-uninstall DELETE_VOLUMES_CHOICE=$$DELETE_VOLUMES_CHOICE
|
||||
endif
|
||||
@@ -241,7 +249,7 @@ endif
|
||||
# ========== Docker Install/Uninstall Targets ==========
|
||||
|
||||
# Valid service targets for docker install/uninstall
|
||||
VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" milvus "label-studio" "data-juicer" dj
|
||||
VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" milvus neo4j "label-studio" "data-juicer" dj
|
||||
|
||||
# Generic docker service install target
|
||||
.PHONY: %-docker-install
|
||||
@@ -266,6 +274,8 @@ VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" mi
|
||||
REGISTRY=$(REGISTRY) docker compose -f deployment/docker/deer-flow/docker-compose.yml up -d; \
|
||||
elif [ "$*" = "milvus" ]; then \
|
||||
docker compose -f deployment/docker/milvus/docker-compose.yml up -d; \
|
||||
elif [ "$*" = "neo4j" ]; then \
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml up -d; \
|
||||
elif [ "$*" = "data-juicer" ] || [ "$*" = "dj" ]; then \
|
||||
REGISTRY=$(REGISTRY) && docker compose -f deployment/docker/datamate/docker-compose.yml up -d datamate-data-juicer; \
|
||||
else \
|
||||
@@ -305,6 +315,12 @@ VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" mi
|
||||
else \
|
||||
docker compose -f deployment/docker/milvus/docker-compose.yml down; \
|
||||
fi; \
|
||||
elif [ "$*" = "neo4j" ]; then \
|
||||
if [ "$(DELETE_VOLUMES_CHOICE)" = "1" ]; then \
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml down -v; \
|
||||
else \
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml down; \
|
||||
fi; \
|
||||
elif [ "$*" = "data-juicer" ] || [ "$*" = "dj" ]; then \
|
||||
$(call docker-compose-service,datamate-data-juicer,down,deployment/docker/datamate); \
|
||||
else \
|
||||
@@ -314,7 +330,7 @@ VALID_SERVICE_TARGETS := datamate backend frontend runtime mineru "deer-flow" mi
|
||||
# ========== Kubernetes Install/Uninstall Targets ==========
|
||||
|
||||
# Valid k8s targets
|
||||
VALID_K8S_TARGETS := mineru datamate deer-flow milvus label-studio data-juicer dj
|
||||
VALID_K8S_TARGETS := mineru datamate deer-flow milvus neo4j label-studio data-juicer dj
|
||||
|
||||
# Generic k8s install target
|
||||
.PHONY: %-k8s-install
|
||||
@@ -327,7 +343,9 @@ VALID_K8S_TARGETS := mineru datamate deer-flow milvus label-studio data-juicer d
|
||||
done; \
|
||||
exit 1; \
|
||||
fi
|
||||
@if [ "$*" = "label-studio" ]; then \
|
||||
@if [ "$*" = "neo4j" ]; then \
|
||||
echo "Skipping Neo4j: no Helm chart available. Use 'make neo4j-docker-install' or provide an external Neo4j instance."; \
|
||||
elif [ "$*" = "label-studio" ]; then \
|
||||
helm upgrade label-studio deployment/helm/label-studio/ -n $(NAMESPACE) --install; \
|
||||
elif [ "$*" = "mineru" ]; then \
|
||||
kubectl apply -f deployment/kubernetes/mineru/deploy.yaml -n $(NAMESPACE); \
|
||||
@@ -356,7 +374,9 @@ VALID_K8S_TARGETS := mineru datamate deer-flow milvus label-studio data-juicer d
|
||||
done; \
|
||||
exit 1; \
|
||||
fi
|
||||
@if [ "$*" = "mineru" ]; then \
|
||||
@if [ "$*" = "neo4j" ]; then \
|
||||
echo "Skipping Neo4j: no Helm chart available. Use 'make neo4j-docker-uninstall' or manage your external Neo4j instance."; \
|
||||
elif [ "$*" = "mineru" ]; then \
|
||||
kubectl delete -f deployment/kubernetes/mineru/deploy.yaml -n $(NAMESPACE); \
|
||||
elif [ "$*" = "datamate" ]; then \
|
||||
helm uninstall datamate -n $(NAMESPACE) --ignore-not-found; \
|
||||
@@ -498,3 +518,25 @@ load-images:
|
||||
else \
|
||||
echo "Successfully loaded $$count image(s)"; \
|
||||
fi
|
||||
|
||||
# ========== Neo4j Targets ==========
|
||||
|
||||
.PHONY: neo4j-up
|
||||
neo4j-up:
|
||||
@echo "Starting Neo4j graph database..."
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml up -d
|
||||
@echo "Neo4j Browser: http://localhost:7474"
|
||||
@echo "Bolt URI: bolt://localhost:7687"
|
||||
|
||||
.PHONY: neo4j-down
|
||||
neo4j-down:
|
||||
@echo "Stopping Neo4j graph database..."
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml down
|
||||
|
||||
.PHONY: neo4j-logs
|
||||
neo4j-logs:
|
||||
docker compose -f deployment/docker/neo4j/docker-compose.yml logs -f
|
||||
|
||||
.PHONY: neo4j-shell
|
||||
neo4j-shell:
|
||||
docker exec -it datamate-neo4j cypher-shell -u neo4j -p "$${NEO4J_PASSWORD:-datamate123}"
|
||||
|
||||
@@ -110,9 +110,9 @@ Thank you for your interest in this project! We warmly welcome contributions fro
|
||||
bug reports, suggesting new features, or directly participating in code development, all forms of help make the project
|
||||
better.
|
||||
|
||||
• 📮 [GitHub Issues](../../issues): Submit bugs or feature suggestions.
|
||||
• 📮 [GitHub Issues](https://github.com/ModelEngine-Group/DataMate/issues): Submit bugs or feature suggestions.
|
||||
|
||||
• 🔧 [GitHub Pull Requests](../../pulls): Contribute code improvements.
|
||||
• 🔧 [GitHub Pull Requests](https://github.com/ModelEngine-Group/DataMate/pulls): Contribute code improvements.
|
||||
|
||||
## 📄 License
|
||||
|
||||
|
||||
@@ -36,6 +36,23 @@
|
||||
<groupId>com.alibaba.fastjson2</groupId>
|
||||
<artifactId>fastjson2</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.jsonwebtoken</groupId>
|
||||
<artifactId>jjwt-api</artifactId>
|
||||
<version>0.11.5</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.jsonwebtoken</groupId>
|
||||
<artifactId>jjwt-impl</artifactId>
|
||||
<version>0.11.5</version>
|
||||
<scope>runtime</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.jsonwebtoken</groupId>
|
||||
<artifactId>jjwt-jackson</artifactId>
|
||||
<version>0.11.5</version>
|
||||
<scope>runtime</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
||||
@@ -37,6 +37,14 @@ public class ApiGatewayApplication {
|
||||
.route("data-collection", r -> r.path("/api/data-collection/**")
|
||||
.uri("http://datamate-backend-python:18000"))
|
||||
|
||||
// 知识图谱抽取服务路由
|
||||
.route("kg-extraction", r -> r.path("/api/kg/**")
|
||||
.uri("http://datamate-backend-python:18000"))
|
||||
|
||||
// GraphRAG 融合查询服务路由
|
||||
.route("graphrag", r -> r.path("/api/graphrag/**")
|
||||
.uri("http://datamate-backend-python:18000"))
|
||||
|
||||
.route("deer-flow-frontend", r -> r.path("/chat/**")
|
||||
.uri("http://deer-flow-frontend:3000"))
|
||||
|
||||
|
||||
@@ -1,34 +1,126 @@
|
||||
package com.datamate.gateway.filter;
|
||||
|
||||
import com.alibaba.fastjson2.JSONObject;
|
||||
import com.datamate.gateway.security.GatewayJwtUtils;
|
||||
import com.datamate.gateway.security.PermissionRuleMatcher;
|
||||
import io.jsonwebtoken.Claims;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.cloud.gateway.filter.GatewayFilterChain;
|
||||
import org.springframework.cloud.gateway.filter.GlobalFilter;
|
||||
import org.springframework.core.Ordered;
|
||||
import org.springframework.http.HttpMethod;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.http.server.reactive.ServerHttpRequest;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.springframework.web.server.ServerWebExchange;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 用户信息过滤器
|
||||
*
|
||||
*/
|
||||
@Slf4j
|
||||
@Component
|
||||
public class UserContextFilter implements GlobalFilter {
|
||||
@Value("${commercial.switch:false}")
|
||||
private boolean isCommercial;
|
||||
public class UserContextFilter implements GlobalFilter, Ordered {
|
||||
private final GatewayJwtUtils gatewayJwtUtils;
|
||||
private final PermissionRuleMatcher permissionRuleMatcher;
|
||||
|
||||
@Value("${datamate.auth.enabled:true}")
|
||||
private boolean authEnabled;
|
||||
|
||||
public UserContextFilter(GatewayJwtUtils gatewayJwtUtils, PermissionRuleMatcher permissionRuleMatcher) {
|
||||
this.gatewayJwtUtils = gatewayJwtUtils;
|
||||
this.permissionRuleMatcher = permissionRuleMatcher;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mono<Void> filter(ServerWebExchange exchange, GatewayFilterChain chain) {
|
||||
if (!isCommercial) {
|
||||
if (!authEnabled) {
|
||||
return chain.filter(exchange);
|
||||
}
|
||||
try {
|
||||
ServerHttpRequest request = exchange.getRequest();
|
||||
String path = request.getURI().getPath();
|
||||
HttpMethod method = request.getMethod();
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("get current user info error", e);
|
||||
if (!path.startsWith("/api/")) {
|
||||
return chain.filter(exchange);
|
||||
}
|
||||
if (HttpMethod.OPTIONS.equals(method)) {
|
||||
return chain.filter(exchange);
|
||||
}
|
||||
if (permissionRuleMatcher.isWhitelisted(path)) {
|
||||
return chain.filter(exchange);
|
||||
}
|
||||
|
||||
String token = extractBearerToken(request.getHeaders().getFirst("Authorization"));
|
||||
if (!StringUtils.hasText(token)) {
|
||||
return writeError(exchange, HttpStatus.UNAUTHORIZED, "auth.0003", "未登录或登录状态已失效");
|
||||
}
|
||||
|
||||
Claims claims;
|
||||
try {
|
||||
if (!gatewayJwtUtils.validateToken(token)) {
|
||||
return writeError(exchange, HttpStatus.UNAUTHORIZED, "auth.0003", "登录状态已失效");
|
||||
}
|
||||
claims = gatewayJwtUtils.getClaimsFromToken(token);
|
||||
} catch (Exception ex) {
|
||||
log.warn("JWT校验失败: {}", ex.getMessage());
|
||||
return writeError(exchange, HttpStatus.UNAUTHORIZED, "auth.0003", "登录状态已失效");
|
||||
}
|
||||
|
||||
String requiredPermission = permissionRuleMatcher.resolveRequiredPermission(method, path);
|
||||
if (StringUtils.hasText(requiredPermission)) {
|
||||
List<String> permissionCodes = gatewayJwtUtils.getStringListClaim(claims, "permissions");
|
||||
if (!permissionCodes.contains(requiredPermission)) {
|
||||
return writeError(exchange, HttpStatus.FORBIDDEN, "auth.0006", "权限不足");
|
||||
}
|
||||
}
|
||||
|
||||
String userId = String.valueOf(claims.get("userId"));
|
||||
String username = claims.getSubject();
|
||||
List<String> roles = gatewayJwtUtils.getStringListClaim(claims, "roles");
|
||||
List<String> permissions = gatewayJwtUtils.getStringListClaim(claims, "permissions");
|
||||
|
||||
ServerHttpRequest mutatedRequest = request.mutate()
|
||||
.header("X-User-Id", userId)
|
||||
.header("X-User-Name", username)
|
||||
.header("X-User-Roles", String.join(",", roles))
|
||||
.header("X-User-Permissions", String.join(",", permissions))
|
||||
.build();
|
||||
return chain.filter(exchange.mutate().request(mutatedRequest).build());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getOrder() {
|
||||
return -200;
|
||||
}
|
||||
|
||||
private String extractBearerToken(String authorizationHeader) {
|
||||
if (!StringUtils.hasText(authorizationHeader)) {
|
||||
return null;
|
||||
}
|
||||
if (!authorizationHeader.startsWith("Bearer ")) {
|
||||
return null;
|
||||
}
|
||||
String token = authorizationHeader.substring("Bearer ".length()).trim();
|
||||
return token.isEmpty() ? null : token;
|
||||
}
|
||||
|
||||
private Mono<Void> writeError(ServerWebExchange exchange,
|
||||
HttpStatus status,
|
||||
String code,
|
||||
String message) {
|
||||
exchange.getResponse().setStatusCode(status);
|
||||
exchange.getResponse().getHeaders().set("Content-Type", "application/json;charset=UTF-8");
|
||||
byte[] body = JSONObject.toJSONString(new ErrorBody(code, message, null))
|
||||
.getBytes(StandardCharsets.UTF_8);
|
||||
return exchange.getResponse().writeWith(Mono.just(exchange.getResponse().bufferFactory().wrap(body)));
|
||||
}
|
||||
|
||||
private record ErrorBody(String code, String message, Object data) {
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,65 @@
|
||||
package com.datamate.gateway.security;
|
||||
|
||||
import io.jsonwebtoken.Claims;
|
||||
import io.jsonwebtoken.Jwts;
|
||||
import io.jsonwebtoken.security.Keys;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import javax.crypto.SecretKey;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* 网关侧JWT工具
|
||||
*/
|
||||
@Component
|
||||
public class GatewayJwtUtils {
|
||||
private static final String DEFAULT_SECRET = "datamate-secret-key-for-jwt-token-generation";
|
||||
|
||||
@Value("${jwt.secret:" + DEFAULT_SECRET + "}")
|
||||
private String secret;
|
||||
|
||||
public Claims getClaimsFromToken(String token) {
|
||||
return Jwts.parserBuilder()
|
||||
.setSigningKey(getSigningKey())
|
||||
.build()
|
||||
.parseClaimsJws(token)
|
||||
.getBody();
|
||||
}
|
||||
|
||||
public boolean validateToken(String token) {
|
||||
Claims claims = getClaimsFromToken(token);
|
||||
Date expiration = claims.getExpiration();
|
||||
return expiration != null && expiration.after(new Date());
|
||||
}
|
||||
|
||||
public List<String> getStringListClaim(Claims claims, String claimName) {
|
||||
Object claimValue = claims.get(claimName);
|
||||
if (!(claimValue instanceof Collection<?> values)) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
return values.stream()
|
||||
.map(String::valueOf)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
private SecretKey getSigningKey() {
|
||||
String secretValue = StringUtils.hasText(secret) ? secret : DEFAULT_SECRET;
|
||||
try {
|
||||
MessageDigest digest = MessageDigest.getInstance("SHA-512");
|
||||
byte[] keyBytes = digest.digest(secretValue.getBytes(StandardCharsets.UTF_8));
|
||||
return Keys.hmacShaKeyFor(keyBytes);
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new IllegalStateException("Cannot initialize JWT signing key", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,88 @@
|
||||
package com.datamate.gateway.security;
|
||||
|
||||
import lombok.Getter;
|
||||
import org.springframework.http.HttpMethod;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.util.AntPathMatcher;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* 权限规则匹配器
|
||||
*/
|
||||
@Component
|
||||
public class PermissionRuleMatcher {
|
||||
private static final Set<HttpMethod> READ_METHODS = Set.of(HttpMethod.GET, HttpMethod.HEAD);
|
||||
private static final Set<HttpMethod> WRITE_METHODS = Set.of(HttpMethod.POST, HttpMethod.PUT, HttpMethod.PATCH, HttpMethod.DELETE);
|
||||
|
||||
private final AntPathMatcher pathMatcher = new AntPathMatcher();
|
||||
private final List<String> whiteListPatterns = List.of(
|
||||
"/api/auth/login",
|
||||
"/api/auth/login/**"
|
||||
);
|
||||
private final List<PermissionRule> rules = buildRules();
|
||||
|
||||
public boolean isWhitelisted(String path) {
|
||||
return whiteListPatterns.stream().anyMatch(pattern -> pathMatcher.match(pattern, path));
|
||||
}
|
||||
|
||||
public String resolveRequiredPermission(HttpMethod method, String path) {
|
||||
for (PermissionRule rule : rules) {
|
||||
if (rule.matches(method, path, pathMatcher)) {
|
||||
return rule.getPermissionCode();
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private List<PermissionRule> buildRules() {
|
||||
List<PermissionRule> permissionRules = new ArrayList<>();
|
||||
addModuleRules(permissionRules, "/api/data-management/**", "module:data-management:read", "module:data-management:write");
|
||||
addModuleRules(permissionRules, "/api/annotation/**", "module:data-annotation:read", "module:data-annotation:write");
|
||||
addModuleRules(permissionRules, "/api/data-collection/**", "module:data-collection:read", "module:data-collection:write");
|
||||
addModuleRules(permissionRules, "/api/evaluation/**", "module:data-evaluation:read", "module:data-evaluation:write");
|
||||
addModuleRules(permissionRules, "/api/synthesis/**", "module:data-synthesis:read", "module:data-synthesis:write");
|
||||
addModuleRules(permissionRules, "/api/knowledge-base/**", "module:knowledge-base:read", "module:knowledge-base:write");
|
||||
addModuleRules(permissionRules, "/api/operator-market/**", "module:operator-market:read", "module:operator-market:write");
|
||||
addModuleRules(permissionRules, "/api/orchestration/**", "module:orchestration:read", "module:orchestration:write");
|
||||
addModuleRules(permissionRules, "/api/content-generation/**", "module:content-generation:use", "module:content-generation:use");
|
||||
addModuleRules(permissionRules, "/api/task-meta/**", "module:task-coordination:read", "module:task-coordination:write");
|
||||
addModuleRules(permissionRules, "/api/knowledge-graph/**", "module:knowledge-graph:read", "module:knowledge-graph:write");
|
||||
addModuleRules(permissionRules, "/api/graphrag/**", "module:knowledge-base:read", "module:knowledge-base:write");
|
||||
|
||||
permissionRules.add(new PermissionRule(READ_METHODS, "/api/auth/users/**", "system:user:manage"));
|
||||
permissionRules.add(new PermissionRule(WRITE_METHODS, "/api/auth/users/**", "system:user:manage"));
|
||||
permissionRules.add(new PermissionRule(READ_METHODS, "/api/auth/roles/**", "system:role:manage"));
|
||||
permissionRules.add(new PermissionRule(WRITE_METHODS, "/api/auth/roles/**", "system:role:manage"));
|
||||
permissionRules.add(new PermissionRule(READ_METHODS, "/api/auth/permissions/**", "system:permission:manage"));
|
||||
permissionRules.add(new PermissionRule(WRITE_METHODS, "/api/auth/permissions/**", "system:permission:manage"));
|
||||
return permissionRules;
|
||||
}
|
||||
|
||||
private void addModuleRules(List<PermissionRule> rules,
|
||||
String pathPattern,
|
||||
String readPermissionCode,
|
||||
String writePermissionCode) {
|
||||
rules.add(new PermissionRule(READ_METHODS, pathPattern, readPermissionCode));
|
||||
rules.add(new PermissionRule(WRITE_METHODS, pathPattern, writePermissionCode));
|
||||
}
|
||||
|
||||
@Getter
|
||||
private static class PermissionRule {
|
||||
private final Set<HttpMethod> methods;
|
||||
private final String pathPattern;
|
||||
private final String permissionCode;
|
||||
|
||||
private PermissionRule(Set<HttpMethod> methods, String pathPattern, String permissionCode) {
|
||||
this.methods = methods;
|
||||
this.pathPattern = pathPattern;
|
||||
this.permissionCode = permissionCode;
|
||||
}
|
||||
|
||||
private boolean matches(HttpMethod method, String path, AntPathMatcher matcher) {
|
||||
return method != null && methods.contains(method) && matcher.match(pathPattern, path);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -470,6 +470,23 @@ paths:
|
||||
'200':
|
||||
description: 上传成功
|
||||
|
||||
/data-management/datasets/upload/cancel-upload/{reqId}:
|
||||
put:
|
||||
tags: [ DatasetFile ]
|
||||
operationId: cancelUpload
|
||||
summary: 取消上传
|
||||
description: 取消预上传请求并清理临时分片
|
||||
parameters:
|
||||
- name: reqId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
description: 预上传请求ID
|
||||
responses:
|
||||
'200':
|
||||
description: 取消成功
|
||||
|
||||
/data-management/dataset-types:
|
||||
get:
|
||||
operationId: getDatasetTypes
|
||||
|
||||
@@ -3,6 +3,7 @@ package com.datamate.datamanagement.application;
|
||||
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.domain.utils.ChunksSaver;
|
||||
import com.datamate.common.setting.application.SysParamApplicationService;
|
||||
import com.datamate.datamanagement.interfaces.dto.*;
|
||||
@@ -64,6 +65,7 @@ public class DatasetApplicationService {
|
||||
private final CollectionTaskClient collectionTaskClient;
|
||||
private final DatasetFileApplicationService datasetFileApplicationService;
|
||||
private final SysParamApplicationService sysParamService;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
|
||||
@Value("${datamate.data-management.base-path:/dataset}")
|
||||
private String datasetBasePath;
|
||||
@@ -102,6 +104,7 @@ public class DatasetApplicationService {
|
||||
public Dataset updateDataset(String datasetId, UpdateDatasetRequest updateDatasetRequest) {
|
||||
Dataset dataset = datasetRepository.getById(datasetId);
|
||||
BusinessAssert.notNull(dataset, DataManagementErrorCode.DATASET_NOT_FOUND);
|
||||
resourceAccessService.assertOwnerAccess(dataset.getCreatedBy());
|
||||
|
||||
if (StringUtils.hasText(updateDatasetRequest.getName())) {
|
||||
dataset.setName(updateDatasetRequest.getName());
|
||||
@@ -151,6 +154,7 @@ public class DatasetApplicationService {
|
||||
public void deleteDataset(String datasetId) {
|
||||
Dataset dataset = datasetRepository.getById(datasetId);
|
||||
BusinessAssert.notNull(dataset, DataManagementErrorCode.DATASET_NOT_FOUND);
|
||||
resourceAccessService.assertOwnerAccess(dataset.getCreatedBy());
|
||||
long childCount = datasetRepository.countByParentId(datasetId);
|
||||
BusinessAssert.isTrue(childCount == 0, DataManagementErrorCode.DATASET_HAS_CHILDREN);
|
||||
datasetRepository.removeById(datasetId);
|
||||
@@ -164,7 +168,8 @@ public class DatasetApplicationService {
|
||||
public Dataset getDataset(String datasetId) {
|
||||
Dataset dataset = datasetRepository.getById(datasetId);
|
||||
BusinessAssert.notNull(dataset, DataManagementErrorCode.DATASET_NOT_FOUND);
|
||||
List<DatasetFile> datasetFiles = datasetFileRepository.findAllByDatasetId(datasetId);
|
||||
resourceAccessService.assertOwnerAccess(dataset.getCreatedBy());
|
||||
List<DatasetFile> datasetFiles = datasetFileRepository.findAllVisibleByDatasetId(datasetId);
|
||||
dataset.setFiles(datasetFiles);
|
||||
applyVisibleFileCounts(Collections.singletonList(dataset));
|
||||
return dataset;
|
||||
@@ -176,7 +181,8 @@ public class DatasetApplicationService {
|
||||
@Transactional(readOnly = true)
|
||||
public PagedResponse<DatasetResponse> getDatasets(DatasetPagingQuery query) {
|
||||
IPage<Dataset> page = new Page<>(query.getPage(), query.getSize());
|
||||
page = datasetRepository.findByCriteria(page, query);
|
||||
String ownerFilterUserId = resourceAccessService.resolveOwnerFilterUserId();
|
||||
page = datasetRepository.findByCriteria(page, query, ownerFilterUserId);
|
||||
String datasetPvcName = getDatasetPvcName();
|
||||
applyVisibleFileCounts(page.getRecords());
|
||||
List<DatasetResponse> datasetResponses = DatasetConverter.INSTANCE.convertToResponse(page.getRecords());
|
||||
@@ -189,6 +195,7 @@ public class DatasetApplicationService {
|
||||
BusinessAssert.isTrue(StringUtils.hasText(datasetId), CommonErrorCode.PARAM_ERROR);
|
||||
Dataset dataset = datasetRepository.getById(datasetId);
|
||||
BusinessAssert.notNull(dataset, DataManagementErrorCode.DATASET_NOT_FOUND);
|
||||
resourceAccessService.assertOwnerAccess(dataset.getCreatedBy());
|
||||
Set<String> sourceTags = normalizeTagNames(dataset.getTags());
|
||||
if (sourceTags.isEmpty()) {
|
||||
return Collections.emptyList();
|
||||
@@ -198,10 +205,12 @@ public class DatasetApplicationService {
|
||||
SIMILAR_DATASET_CANDIDATE_MAX,
|
||||
Math.max(safeLimit * SIMILAR_DATASET_CANDIDATE_FACTOR, safeLimit)
|
||||
);
|
||||
String ownerFilterUserId = resourceAccessService.resolveOwnerFilterUserId();
|
||||
List<Dataset> candidates = datasetRepository.findSimilarByTags(
|
||||
new ArrayList<>(sourceTags),
|
||||
datasetId,
|
||||
candidateLimit
|
||||
candidateLimit,
|
||||
ownerFilterUserId
|
||||
);
|
||||
if (CollectionUtils.isEmpty(candidates)) {
|
||||
return Collections.emptyList();
|
||||
@@ -436,10 +445,11 @@ public class DatasetApplicationService {
|
||||
if (dataset == null) {
|
||||
throw new IllegalArgumentException("Dataset not found: " + datasetId);
|
||||
}
|
||||
resourceAccessService.assertOwnerAccess(dataset.getCreatedBy());
|
||||
|
||||
Map<String, Object> statistics = new HashMap<>();
|
||||
|
||||
List<DatasetFile> allFiles = datasetFileRepository.findAllByDatasetId(datasetId);
|
||||
List<DatasetFile> allFiles = datasetFileRepository.findAllVisibleByDatasetId(datasetId);
|
||||
List<DatasetFile> visibleFiles = filterVisibleFiles(allFiles);
|
||||
long totalFiles = visibleFiles.size();
|
||||
long completedFiles = visibleFiles.stream()
|
||||
@@ -485,8 +495,12 @@ public class DatasetApplicationService {
|
||||
* 获取所有数据集的汇总统计信息
|
||||
*/
|
||||
public AllDatasetStatisticsResponse getAllDatasetStatistics() {
|
||||
if (resourceAccessService.isAdmin()) {
|
||||
return datasetRepository.getAllDatasetStatistics();
|
||||
}
|
||||
String currentUserId = resourceAccessService.requireCurrentUserId();
|
||||
return datasetRepository.getAllDatasetStatisticsByCreatedBy(currentUserId);
|
||||
}
|
||||
|
||||
/**
|
||||
* 异步处理数据源文件扫描
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,7 +1,9 @@
|
||||
package com.datamate.datamanagement.application;
|
||||
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessAssert;
|
||||
import com.datamate.common.infrastructure.exception.CommonErrorCode;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeStatusType;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeItemDirectory;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
@@ -32,17 +34,19 @@ public class KnowledgeDirectoryApplicationService {
|
||||
private final KnowledgeItemDirectoryRepository knowledgeItemDirectoryRepository;
|
||||
private final KnowledgeItemRepository knowledgeItemRepository;
|
||||
private final KnowledgeSetRepository knowledgeSetRepository;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<KnowledgeItemDirectory> getKnowledgeDirectories(String setId, KnowledgeDirectoryQuery query) {
|
||||
BusinessAssert.notNull(query, CommonErrorCode.PARAM_ERROR);
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
query.setSetId(setId);
|
||||
return knowledgeItemDirectoryRepository.findByCriteria(query);
|
||||
}
|
||||
|
||||
public KnowledgeItemDirectory createKnowledgeDirectory(String setId, CreateKnowledgeDirectoryRequest request) {
|
||||
BusinessAssert.notNull(request, CommonErrorCode.PARAM_ERROR);
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -71,7 +75,7 @@ public class KnowledgeDirectoryApplicationService {
|
||||
}
|
||||
|
||||
public void deleteKnowledgeDirectory(String setId, String relativePath) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -88,6 +92,15 @@ public class KnowledgeDirectoryApplicationService {
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
private KnowledgeSet requireAccessibleKnowledgeSet(String setId) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(knowledgeSet.getSensitivity())) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
private boolean isReadOnlyStatus(KnowledgeStatusType status) {
|
||||
return status == KnowledgeStatusType.ARCHIVED || status == KnowledgeStatusType.DEPRECATED;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package com.datamate.datamanagement.application;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessAssert;
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.infrastructure.exception.CommonErrorCode;
|
||||
@@ -12,11 +13,11 @@ import com.datamate.datamanagement.common.enums.KnowledgeSourceType;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeStatusType;
|
||||
import com.datamate.datamanagement.domain.model.dataset.Dataset;
|
||||
import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
|
||||
import com.datamate.datamanagement.domain.model.dataset.Tag;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeItem;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
import com.datamate.datamanagement.infrastructure.config.DataManagementProperties;
|
||||
import com.datamate.datamanagement.infrastructure.exception.DataManagementErrorCode;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.mapper.TagMapper;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetFileRepository;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetRepository;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.KnowledgeItemRepository;
|
||||
@@ -30,6 +31,7 @@ import com.datamate.datamanagement.interfaces.dto.KnowledgeItemResponse;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemSearchQuery;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemSearchResponse;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeManagementStatisticsResponse;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeSetPagingQuery;
|
||||
import com.datamate.datamanagement.interfaces.dto.ReplaceKnowledgeItemFileRequest;
|
||||
import com.datamate.datamanagement.interfaces.dto.UpdateKnowledgeItemRequest;
|
||||
import com.datamate.datamanagement.interfaces.dto.UploadKnowledgeItemsRequest;
|
||||
@@ -56,12 +58,15 @@ import java.nio.file.Paths;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* 知识条目应用服务
|
||||
@@ -88,11 +93,11 @@ public class KnowledgeItemApplicationService {
|
||||
private final DatasetRepository datasetRepository;
|
||||
private final DatasetFileRepository datasetFileRepository;
|
||||
private final DataManagementProperties dataManagementProperties;
|
||||
private final TagMapper tagMapper;
|
||||
private final KnowledgeItemPreviewService knowledgeItemPreviewService;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
|
||||
public KnowledgeItem createKnowledgeItem(String setId, CreateKnowledgeItemRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -112,7 +117,7 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public List<KnowledgeItem> uploadKnowledgeItems(String setId, UploadKnowledgeItemsRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -126,7 +131,9 @@ public class KnowledgeItemApplicationService {
|
||||
createDirectories(setDir);
|
||||
|
||||
List<KnowledgeItem> items = new ArrayList<>();
|
||||
List<Path> savedFilePaths = new ArrayList<>();
|
||||
|
||||
try {
|
||||
for (MultipartFile file : files) {
|
||||
BusinessAssert.notNull(file, CommonErrorCode.PARAM_ERROR);
|
||||
BusinessAssert.isTrue(!file.isEmpty(), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -144,6 +151,7 @@ public class KnowledgeItemApplicationService {
|
||||
BusinessAssert.isTrue(targetPath.startsWith(setDir), CommonErrorCode.PARAM_ERROR);
|
||||
|
||||
saveMultipartFile(file, targetPath);
|
||||
savedFilePaths.add(targetPath);
|
||||
|
||||
KnowledgeItem knowledgeItem = new KnowledgeItem();
|
||||
knowledgeItem.setId(UUID.randomUUID().toString());
|
||||
@@ -161,10 +169,19 @@ public class KnowledgeItemApplicationService {
|
||||
knowledgeItemRepository.saveBatch(items, items.size());
|
||||
}
|
||||
return items;
|
||||
} catch (Exception e) {
|
||||
for (Path filePath : savedFilePaths) {
|
||||
deleteFileQuietly(filePath);
|
||||
}
|
||||
if (e instanceof BusinessException) {
|
||||
throw (BusinessException) e;
|
||||
}
|
||||
throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR);
|
||||
}
|
||||
}
|
||||
|
||||
public KnowledgeItem updateKnowledgeItem(String setId, String itemId, UpdateKnowledgeItemRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -187,13 +204,18 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public void deleteKnowledgeItem(String setId, String itemId) {
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
|
||||
deleteKnowledgeItemFile(knowledgeItem);
|
||||
knowledgeItemPreviewService.deletePreviewFileQuietly(setId, itemId);
|
||||
knowledgeItemRepository.removeById(itemId);
|
||||
}
|
||||
|
||||
public void deleteKnowledgeItems(String setId, DeleteKnowledgeItemsRequest request) {
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.notNull(request, CommonErrorCode.PARAM_ERROR);
|
||||
List<String> ids = request.getIds();
|
||||
BusinessAssert.isTrue(CollectionUtils.isNotEmpty(ids), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -205,12 +227,18 @@ public class KnowledgeItemApplicationService {
|
||||
boolean allMatch = items.stream().allMatch(item -> Objects.equals(item.getSetId(), setId));
|
||||
BusinessAssert.isTrue(allMatch, CommonErrorCode.PARAM_ERROR);
|
||||
|
||||
for (KnowledgeItem item : items) {
|
||||
deleteKnowledgeItemFile(item);
|
||||
knowledgeItemPreviewService.deletePreviewFileQuietly(setId, item.getId());
|
||||
}
|
||||
|
||||
List<String> deleteIds = items.stream().map(KnowledgeItem::getId).toList();
|
||||
knowledgeItemRepository.removeByIds(deleteIds);
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public KnowledgeItem getKnowledgeItem(String setId, String itemId) {
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -219,6 +247,7 @@ public class KnowledgeItemApplicationService {
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public PagedResponse<KnowledgeItemResponse> getKnowledgeItems(String setId, KnowledgeItemPagingQuery query) {
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
query.setSetId(setId);
|
||||
IPage<KnowledgeItem> page = new Page<>(query.getPage(), query.getSize());
|
||||
page = knowledgeItemRepository.findByCriteria(page, query);
|
||||
@@ -228,19 +257,58 @@ public class KnowledgeItemApplicationService {
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public KnowledgeManagementStatisticsResponse getKnowledgeManagementStatistics() {
|
||||
boolean excludeConfidential = !resourceAccessService.canViewConfidential();
|
||||
String ownerFilterUserId = resourceAccessService.resolveOwnerFilterUserId();
|
||||
KnowledgeSetPagingQuery baseQuery = new KnowledgeSetPagingQuery();
|
||||
KnowledgeManagementStatisticsResponse response = new KnowledgeManagementStatisticsResponse();
|
||||
response.setTotalKnowledgeSets(knowledgeSetRepository.count());
|
||||
|
||||
long totalFiles = knowledgeItemRepository.countBySourceTypes(List.of(
|
||||
long totalSets = knowledgeSetRepository.countByCriteria(baseQuery, ownerFilterUserId, excludeConfidential);
|
||||
response.setTotalKnowledgeSets(totalSets);
|
||||
|
||||
List<String> accessibleSetIds = knowledgeSetRepository.listSetIdsByCriteria(baseQuery, ownerFilterUserId, excludeConfidential);
|
||||
if (CollectionUtils.isEmpty(accessibleSetIds)) {
|
||||
response.setTotalFiles(0L);
|
||||
response.setTotalSize(0L);
|
||||
response.setTotalTags(0L);
|
||||
return response;
|
||||
}
|
||||
List<KnowledgeSet> accessibleSets = knowledgeSetRepository.listByIds(accessibleSetIds);
|
||||
if (CollectionUtils.isEmpty(accessibleSets)) {
|
||||
response.setTotalFiles(0L);
|
||||
response.setTotalSize(0L);
|
||||
response.setTotalTags(0L);
|
||||
return response;
|
||||
}
|
||||
|
||||
List<String> normalizedSetIds = accessibleSets.stream()
|
||||
.map(KnowledgeSet::getId)
|
||||
.filter(StringUtils::isNotBlank)
|
||||
.toList();
|
||||
if (CollectionUtils.isEmpty(normalizedSetIds)) {
|
||||
response.setTotalFiles(0L);
|
||||
response.setTotalSize(0L);
|
||||
response.setTotalTags(0L);
|
||||
return response;
|
||||
}
|
||||
|
||||
long totalFiles = knowledgeItemRepository.countBySourceTypesAndSetIds(List.of(
|
||||
KnowledgeSourceType.DATASET_FILE,
|
||||
KnowledgeSourceType.FILE_UPLOAD
|
||||
));
|
||||
), normalizedSetIds);
|
||||
response.setTotalFiles(totalFiles);
|
||||
|
||||
long datasetFileSize = safeLong(knowledgeItemRepository.sumDatasetFileSize());
|
||||
long uploadFileSize = calculateUploadFileTotalSize();
|
||||
long datasetFileSize = safeLong(knowledgeItemRepository.sumDatasetFileSizeBySetIds(normalizedSetIds));
|
||||
long uploadFileSize = calculateUploadFileTotalSize(normalizedSetIds);
|
||||
response.setTotalSize(datasetFileSize + uploadFileSize);
|
||||
response.setTotalTags(safeLong(tagMapper.countKnowledgeSetTags()));
|
||||
|
||||
long totalTags = accessibleSets.stream()
|
||||
.filter(Objects::nonNull)
|
||||
.flatMap(set -> CollectionUtils.isEmpty(set.getTags()) ? Collections.<Tag>emptyList().stream() : set.getTags().stream())
|
||||
.map(tag -> StringUtils.trimToNull(tag == null ? null : tag.getName()))
|
||||
.filter(Objects::nonNull)
|
||||
.collect(Collectors.toCollection(HashSet::new))
|
||||
.size();
|
||||
response.setTotalTags(totalTags);
|
||||
|
||||
return response;
|
||||
}
|
||||
@@ -251,8 +319,9 @@ public class KnowledgeItemApplicationService {
|
||||
String keyword = StringUtils.trimToEmpty(query.getKeyword());
|
||||
BusinessAssert.isTrue(StringUtils.isNotBlank(keyword), CommonErrorCode.PARAM_ERROR);
|
||||
|
||||
boolean excludeConfidential = !resourceAccessService.canViewConfidential();
|
||||
IPage<KnowledgeItemSearchResponse> page = new Page<>(query.getPage(), query.getSize());
|
||||
IPage<KnowledgeItemSearchResponse> result = knowledgeItemRepository.searchFileItems(page, keyword);
|
||||
IPage<KnowledgeItemSearchResponse> result = knowledgeItemRepository.searchFileItems(page, keyword, excludeConfidential);
|
||||
List<KnowledgeItemSearchResponse> responses = result.getRecords()
|
||||
.stream()
|
||||
.map(this::normalizeSearchResponse)
|
||||
@@ -261,7 +330,7 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public List<KnowledgeItem> importKnowledgeItems(String setId, ImportKnowledgeItemsRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
Dataset dataset = datasetRepository.getById(request.getDatasetId());
|
||||
@@ -298,7 +367,7 @@ public class KnowledgeItemApplicationService {
|
||||
@Transactional(readOnly = true)
|
||||
public void exportKnowledgeItems(String setId, HttpServletResponse response) {
|
||||
BusinessAssert.notNull(response, CommonErrorCode.PARAM_ERROR);
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
List<KnowledgeItem> items = knowledgeItemRepository.findAllBySetId(setId);
|
||||
|
||||
response.setContentType(EXPORT_CONTENT_TYPE);
|
||||
@@ -327,6 +396,7 @@ public class KnowledgeItemApplicationService {
|
||||
@Transactional(readOnly = true)
|
||||
public void downloadKnowledgeItemFile(String setId, String itemId, HttpServletResponse response) {
|
||||
BusinessAssert.notNull(response, CommonErrorCode.PARAM_ERROR);
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -360,6 +430,7 @@ public class KnowledgeItemApplicationService {
|
||||
@Transactional(readOnly = true)
|
||||
public void previewKnowledgeItemFile(String setId, String itemId, HttpServletResponse response) {
|
||||
BusinessAssert.notNull(response, CommonErrorCode.PARAM_ERROR);
|
||||
requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -421,7 +492,7 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
|
||||
public KnowledgeItem replaceKnowledgeItemFile(String setId, String itemId, ReplaceKnowledgeItemFileRequest request) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
KnowledgeSet knowledgeSet = requireAccessibleKnowledgeSet(setId);
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, DataManagementErrorCode.KNOWLEDGE_ITEM_NOT_FOUND);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
@@ -635,8 +706,8 @@ public class KnowledgeItemApplicationService {
|
||||
return item;
|
||||
}
|
||||
|
||||
private long calculateUploadFileTotalSize() {
|
||||
List<KnowledgeItem> items = knowledgeItemRepository.findFileUploadItems();
|
||||
private long calculateUploadFileTotalSize(List<String> setIds) {
|
||||
List<KnowledgeItem> items = knowledgeItemRepository.findFileUploadItemsBySetIds(setIds);
|
||||
if (CollectionUtils.isEmpty(items)) {
|
||||
return 0L;
|
||||
}
|
||||
@@ -785,6 +856,29 @@ public class KnowledgeItemApplicationService {
|
||||
}
|
||||
}
|
||||
|
||||
private void deleteKnowledgeItemFile(KnowledgeItem knowledgeItem) {
|
||||
if (knowledgeItem == null) {
|
||||
return;
|
||||
}
|
||||
if (knowledgeItem.getContentType() != KnowledgeContentType.FILE) {
|
||||
return;
|
||||
}
|
||||
KnowledgeSourceType sourceType = knowledgeItem.getSourceType();
|
||||
if (sourceType != KnowledgeSourceType.FILE_UPLOAD && sourceType != KnowledgeSourceType.MANUAL) {
|
||||
return;
|
||||
}
|
||||
|
||||
String relativePath = knowledgeItem.getContent();
|
||||
if (StringUtils.isNotBlank(relativePath)) {
|
||||
try {
|
||||
Path filePath = resolveKnowledgeItemStoragePath(relativePath);
|
||||
deleteFileQuietly(filePath);
|
||||
} catch (Exception e) {
|
||||
log.warn("delete knowledge item file error, itemId: {}, path: {}", knowledgeItem.getId(), relativePath, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private String resolveOriginalFileName(MultipartFile file) {
|
||||
String originalName = file.getOriginalFilename();
|
||||
if (StringUtils.isBlank(originalName)) {
|
||||
@@ -803,6 +897,18 @@ public class KnowledgeItemApplicationService {
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
/**
|
||||
* 校验当前用户是否可访问指定知识集(含保密权限检查)
|
||||
*/
|
||||
private KnowledgeSet requireAccessibleKnowledgeSet(String setId) {
|
||||
KnowledgeSet knowledgeSet = requireKnowledgeSet(setId);
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(knowledgeSet.getSensitivity())) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
private String buildExportFileName(String setId) {
|
||||
return EXPORT_FILE_PREFIX + setId + "_" + LocalDateTime.now().format(EXPORT_TIME_FORMATTER) + EXPORT_FILE_SUFFIX;
|
||||
}
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
package com.datamate.datamanagement.application;
|
||||
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessAssert;
|
||||
import com.datamate.common.infrastructure.exception.CommonErrorCode;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeContentType;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeItemPreviewStatus;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeSourceType;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeItem;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
import com.datamate.datamanagement.infrastructure.config.DataManagementProperties;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.KnowledgeItemRepository;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.KnowledgeSetRepository;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemPreviewStatusResponse;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
@@ -38,8 +42,10 @@ public class KnowledgeItemPreviewService {
|
||||
private static final DateTimeFormatter PREVIEW_TIME_FORMATTER = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
|
||||
|
||||
private final KnowledgeItemRepository knowledgeItemRepository;
|
||||
private final KnowledgeSetRepository knowledgeSetRepository;
|
||||
private final DataManagementProperties dataManagementProperties;
|
||||
private final KnowledgeItemPreviewAsyncService knowledgeItemPreviewAsyncService;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
private final ObjectMapper objectMapper = new ObjectMapper();
|
||||
|
||||
public KnowledgeItemPreviewStatusResponse getPreviewStatus(String setId, String itemId) {
|
||||
@@ -138,6 +144,14 @@ public class KnowledgeItemPreviewService {
|
||||
private KnowledgeItem requireKnowledgeItem(String setId, String itemId) {
|
||||
BusinessAssert.isTrue(StringUtils.isNotBlank(setId), CommonErrorCode.PARAM_ERROR);
|
||||
BusinessAssert.isTrue(StringUtils.isNotBlank(itemId), CommonErrorCode.PARAM_ERROR);
|
||||
|
||||
KnowledgeSet knowledgeSet = knowledgeSetRepository.getById(setId);
|
||||
BusinessAssert.notNull(knowledgeSet, CommonErrorCode.PARAM_ERROR);
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(knowledgeSet.getSensitivity())) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
|
||||
KnowledgeItem knowledgeItem = knowledgeItemRepository.getById(itemId);
|
||||
BusinessAssert.notNull(knowledgeItem, CommonErrorCode.PARAM_ERROR);
|
||||
BusinessAssert.isTrue(Objects.equals(knowledgeItem.getSetId(), setId), CommonErrorCode.PARAM_ERROR);
|
||||
|
||||
@@ -2,8 +2,10 @@ package com.datamate.datamanagement.application;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessAssert;
|
||||
import com.datamate.common.infrastructure.exception.CommonErrorCode;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeStatusType;
|
||||
import com.datamate.datamanagement.domain.model.dataset.Tag;
|
||||
@@ -40,13 +42,16 @@ import java.util.UUID;
|
||||
public class KnowledgeSetApplicationService {
|
||||
private final KnowledgeSetRepository knowledgeSetRepository;
|
||||
private final TagMapper tagMapper;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
|
||||
public KnowledgeSet createKnowledgeSet(CreateKnowledgeSetRequest request) {
|
||||
BusinessAssert.isTrue(knowledgeSetRepository.findByName(request.getName()) == null,
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_ALREADY_EXISTS);
|
||||
assertCanUseSensitivity(request.getSensitivity());
|
||||
|
||||
KnowledgeSet knowledgeSet = KnowledgeConverter.INSTANCE.convertToKnowledgeSet(request);
|
||||
knowledgeSet.setId(UUID.randomUUID().toString());
|
||||
knowledgeSet.setSensitivity(normalizeSensitivity(knowledgeSet.getSensitivity()));
|
||||
if (knowledgeSet.getStatus() == null) {
|
||||
knowledgeSet.setStatus(KnowledgeStatusType.DRAFT);
|
||||
}
|
||||
@@ -64,6 +69,8 @@ public class KnowledgeSetApplicationService {
|
||||
public KnowledgeSet updateKnowledgeSet(String setId, UpdateKnowledgeSetRequest request) {
|
||||
KnowledgeSet knowledgeSet = knowledgeSetRepository.getById(setId);
|
||||
BusinessAssert.notNull(knowledgeSet, DataManagementErrorCode.KNOWLEDGE_SET_NOT_FOUND);
|
||||
resourceAccessService.assertOwnerAccess(knowledgeSet.getCreatedBy());
|
||||
assertConfidentialAccess(knowledgeSet);
|
||||
BusinessAssert.isTrue(!isReadOnlyStatus(knowledgeSet.getStatus()),
|
||||
DataManagementErrorCode.KNOWLEDGE_SET_STATUS_ERROR);
|
||||
|
||||
@@ -103,7 +110,8 @@ public class KnowledgeSetApplicationService {
|
||||
knowledgeSet.setSourceType(request.getSourceType());
|
||||
}
|
||||
if (request.getSensitivity() != null) {
|
||||
knowledgeSet.setSensitivity(request.getSensitivity());
|
||||
assertCanUseSensitivity(request.getSensitivity());
|
||||
knowledgeSet.setSensitivity(normalizeSensitivity(request.getSensitivity()));
|
||||
}
|
||||
if (request.getMetadata() != null) {
|
||||
knowledgeSet.setMetadata(request.getMetadata());
|
||||
@@ -119,6 +127,8 @@ public class KnowledgeSetApplicationService {
|
||||
public void deleteKnowledgeSet(String setId) {
|
||||
KnowledgeSet knowledgeSet = knowledgeSetRepository.getById(setId);
|
||||
BusinessAssert.notNull(knowledgeSet, DataManagementErrorCode.KNOWLEDGE_SET_NOT_FOUND);
|
||||
resourceAccessService.assertOwnerAccess(knowledgeSet.getCreatedBy());
|
||||
assertConfidentialAccess(knowledgeSet);
|
||||
knowledgeSetRepository.removeById(setId);
|
||||
}
|
||||
|
||||
@@ -126,17 +136,42 @@ public class KnowledgeSetApplicationService {
|
||||
public KnowledgeSet getKnowledgeSet(String setId) {
|
||||
KnowledgeSet knowledgeSet = knowledgeSetRepository.getById(setId);
|
||||
BusinessAssert.notNull(knowledgeSet, DataManagementErrorCode.KNOWLEDGE_SET_NOT_FOUND);
|
||||
resourceAccessService.assertOwnerAccess(knowledgeSet.getCreatedBy());
|
||||
assertConfidentialAccess(knowledgeSet);
|
||||
return knowledgeSet;
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public PagedResponse<KnowledgeSetResponse> getKnowledgeSets(KnowledgeSetPagingQuery query) {
|
||||
IPage<KnowledgeSet> page = new Page<>(query.getPage(), query.getSize());
|
||||
page = knowledgeSetRepository.findByCriteria(page, query);
|
||||
String ownerFilterUserId = resourceAccessService.resolveOwnerFilterUserId();
|
||||
boolean excludeConfidential = !resourceAccessService.canViewConfidential();
|
||||
page = knowledgeSetRepository.findByCriteria(page, query, ownerFilterUserId, excludeConfidential);
|
||||
List<KnowledgeSetResponse> responses = KnowledgeConverter.INSTANCE.convertSetResponses(page.getRecords());
|
||||
return PagedResponse.of(responses, page.getCurrent(), page.getTotal(), page.getPages());
|
||||
}
|
||||
|
||||
private void assertConfidentialAccess(KnowledgeSet knowledgeSet) {
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(knowledgeSet.getSensitivity())) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
}
|
||||
|
||||
private void assertCanUseSensitivity(String sensitivity) {
|
||||
if (ResourceAccessService.CONFIDENTIAL_SENSITIVITY.equalsIgnoreCase(sensitivity)) {
|
||||
BusinessAssert.isTrue(resourceAccessService.canViewConfidential(),
|
||||
SystemErrorCode.INSUFFICIENT_PERMISSIONS);
|
||||
}
|
||||
}
|
||||
|
||||
private String normalizeSensitivity(String sensitivity) {
|
||||
if (!StringUtils.hasText(sensitivity)) {
|
||||
return null;
|
||||
}
|
||||
return sensitivity.trim().toUpperCase();
|
||||
}
|
||||
|
||||
private boolean isReadOnlyStatus(KnowledgeStatusType status) {
|
||||
return status == KnowledgeStatusType.ARCHIVED || status == KnowledgeStatusType.DEPRECATED;
|
||||
}
|
||||
|
||||
@@ -4,6 +4,8 @@ import com.datamate.common.infrastructure.common.Response;
|
||||
import com.datamate.datamanagement.infrastructure.client.PdfTextExtractClient;
|
||||
import com.datamate.datamanagement.infrastructure.client.dto.PdfTextExtractRequest;
|
||||
import com.datamate.datamanagement.infrastructure.client.dto.PdfTextExtractResponse;
|
||||
import feign.FeignException;
|
||||
import feign.Request;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
@@ -47,8 +49,71 @@ public class PdfTextExtractAsyncService {
|
||||
} else {
|
||||
log.info("PdfTextExtract succeeded, datasetId={}, fileId={}", datasetId, fileId);
|
||||
}
|
||||
} catch (FeignException feignException) {
|
||||
logFeignException(datasetId, fileId, feignException);
|
||||
} catch (Exception e) {
|
||||
log.error("PdfTextExtract call failed, datasetId={}, fileId={}", datasetId, fileId, e);
|
||||
}
|
||||
}
|
||||
|
||||
private void logFeignException(String datasetId, String fileId, FeignException feignException) {
|
||||
Request request = feignException.request();
|
||||
String httpMethod = request == null || request.httpMethod() == null
|
||||
? "UNKNOWN"
|
||||
: request.httpMethod().name();
|
||||
String requestUrl = request == null || request.url() == null
|
||||
? "UNKNOWN"
|
||||
: request.url();
|
||||
String responseBody = resolveFeignResponseBody(feignException);
|
||||
String rootCauseChain = buildCauseChain(feignException, 12);
|
||||
log.error(
|
||||
"PdfTextExtract call failed with FeignException, datasetId={}, fileId={}, status={}, method={}, url={}, responseBody=\n{}\nrootCauseChain={}",
|
||||
datasetId,
|
||||
fileId,
|
||||
feignException.status(),
|
||||
httpMethod,
|
||||
requestUrl,
|
||||
responseBody,
|
||||
rootCauseChain,
|
||||
feignException
|
||||
);
|
||||
}
|
||||
|
||||
private String resolveFeignResponseBody(FeignException feignException) {
|
||||
String responseBody = feignException.contentUTF8();
|
||||
if (responseBody == null || responseBody.isBlank()) {
|
||||
responseBody = feignException.getMessage();
|
||||
}
|
||||
if (responseBody == null || responseBody.isBlank()) {
|
||||
return "EMPTY_RESPONSE_BODY";
|
||||
}
|
||||
return responseBody;
|
||||
}
|
||||
|
||||
private String buildCauseChain(Throwable throwable, int maxDepth) {
|
||||
StringBuilder causeChain = new StringBuilder();
|
||||
Throwable current = throwable;
|
||||
int depth = 0;
|
||||
while (current != null && depth < maxDepth) {
|
||||
if (causeChain.length() > 0) {
|
||||
causeChain.append(" <- ");
|
||||
}
|
||||
causeChain.append(current.getClass().getSimpleName())
|
||||
.append(": ")
|
||||
.append(normalizeCauseMessage(current.getMessage()));
|
||||
current = current.getCause();
|
||||
depth++;
|
||||
}
|
||||
if (current != null) {
|
||||
causeChain.append(" <- ...");
|
||||
}
|
||||
return causeChain.toString();
|
||||
}
|
||||
|
||||
private String normalizeCauseMessage(String message) {
|
||||
if (message == null || message.isBlank()) {
|
||||
return "EMPTY_MESSAGE";
|
||||
}
|
||||
return message.replace("\r", " ").replace("\n", " ").trim();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,5 +7,6 @@ package com.datamate.datamanagement.common.enums;
|
||||
*/
|
||||
public enum DuplicateMethod {
|
||||
ERROR,
|
||||
COVER
|
||||
COVER,
|
||||
VERSION
|
||||
}
|
||||
|
||||
@@ -152,12 +152,20 @@ public class Dataset extends BaseEntity<String> {
|
||||
}
|
||||
|
||||
public void removeFile(DatasetFile file) {
|
||||
if (this.files.remove(file)) {
|
||||
if (file == null) {
|
||||
return;
|
||||
}
|
||||
boolean removed = this.files.remove(file);
|
||||
if (!removed && file.getId() != null) {
|
||||
removed = this.files.removeIf(existing -> Objects.equals(existing.getId(), file.getId()));
|
||||
}
|
||||
if (!removed) {
|
||||
return;
|
||||
}
|
||||
this.fileCount = Math.max(0, this.fileCount - 1);
|
||||
this.sizeBytes = Math.max(0, this.sizeBytes - (file.getFileSize() != null ? file.getFileSize() : 0L));
|
||||
this.updatedAt = LocalDateTime.now();
|
||||
}
|
||||
}
|
||||
|
||||
public void active() {
|
||||
if (this.status == DatasetStatusType.DRAFT) {
|
||||
|
||||
@@ -28,12 +28,16 @@ public class DatasetFile {
|
||||
private String datasetId; // UUID
|
||||
private String fileName;
|
||||
private String filePath;
|
||||
/** 文件逻辑路径(相对数据集根目录,包含子目录) */
|
||||
private String logicalPath;
|
||||
/** 文件版本号(同一个 logicalPath 下递增) */
|
||||
private Long version;
|
||||
private String fileType; // JPG/PNG/DCM/TXT
|
||||
private Long fileSize; // bytes
|
||||
private String checkSum;
|
||||
private String tags;
|
||||
private String metadata;
|
||||
private String status; // UPLOADED, PROCESSING, COMPLETED, ERROR
|
||||
private String status; // ACTIVE/ARCHIVED/DELETED/PROCESSING...
|
||||
private LocalDateTime uploadTime;
|
||||
private LocalDateTime lastAccessTime;
|
||||
private LocalDateTime createdAt;
|
||||
|
||||
@@ -21,4 +21,7 @@ public class DatasetFileUploadCheckInfo {
|
||||
|
||||
/** 目标子目录前缀,例如 "images/",为空表示数据集根目录 */
|
||||
private String prefix;
|
||||
|
||||
/** 上传临时落盘目录(仅服务端使用,不对外暴露) */
|
||||
private String stagingPath;
|
||||
}
|
||||
|
||||
@@ -21,8 +21,8 @@ public class DataManagementConfig {
|
||||
/**
|
||||
* 缓存管理器
|
||||
*/
|
||||
@Bean
|
||||
public CacheManager cacheManager() {
|
||||
@Bean("dataManagementCacheManager")
|
||||
public CacheManager dataManagementCacheManager() {
|
||||
return new ConcurrentMapCacheManager("datasets", "datasetFiles", "tags");
|
||||
}
|
||||
|
||||
|
||||
@@ -26,8 +26,6 @@ public interface DatasetFileMapper extends BaseMapper<DatasetFile> {
|
||||
@Param("status") String status,
|
||||
RowBounds rowBounds);
|
||||
|
||||
int update(DatasetFile file);
|
||||
int deleteById(@Param("id") String id);
|
||||
int updateFilePathPrefix(@Param("datasetId") String datasetId,
|
||||
@Param("oldPrefix") String oldPrefix,
|
||||
@Param("newPrefix") String newPrefix);
|
||||
@@ -48,4 +46,13 @@ public interface DatasetFileMapper extends BaseMapper<DatasetFile> {
|
||||
* @return 文件数统计列表
|
||||
*/
|
||||
List<DatasetFileCount> countNonDerivedByDatasetIds(@Param("datasetIds") List<String> datasetIds);
|
||||
|
||||
/**
|
||||
* 查询指定逻辑路径的所有文件(包括所有状态)
|
||||
*
|
||||
* @param datasetId 数据集ID
|
||||
* @param logicalPath 逻辑路径
|
||||
* @return 文件列表
|
||||
*/
|
||||
List<DatasetFile> findAllByDatasetIdAndLogicalPath(@Param("datasetId") String datasetId, @Param("logicalPath") String logicalPath);
|
||||
}
|
||||
|
||||
@@ -28,6 +28,5 @@ public interface DatasetMapper extends BaseMapper<Dataset> {
|
||||
@Param("keyword") String keyword,
|
||||
@Param("tagNames") List<String> tagNames);
|
||||
|
||||
int deleteById(@Param("id") String id);
|
||||
AllDatasetStatisticsResponse getAllDatasetStatistics();
|
||||
}
|
||||
|
||||
@@ -8,9 +8,12 @@ import org.apache.ibatis.annotations.Mapper;
|
||||
import org.apache.ibatis.annotations.Param;
|
||||
import org.apache.ibatis.annotations.Select;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@Mapper
|
||||
public interface KnowledgeItemMapper extends BaseMapper<KnowledgeItem> {
|
||||
@Select("""
|
||||
<script>
|
||||
SELECT
|
||||
ki.id AS id,
|
||||
ki.set_id AS setId,
|
||||
@@ -34,19 +37,32 @@ public interface KnowledgeItemMapper extends BaseMapper<KnowledgeItem> {
|
||||
FROM t_dm_knowledge_items ki
|
||||
LEFT JOIN t_dm_knowledge_sets ks ON ki.set_id = ks.id
|
||||
LEFT JOIN t_dm_dataset_files df ON ki.source_file_id = df.id AND ki.source_type = 'DATASET_FILE'
|
||||
WHERE (ki.source_type = 'FILE_UPLOAD' AND (ki.source_file_id LIKE CONCAT('%', #{keyword}, '%')
|
||||
WHERE ((ki.source_type = 'FILE_UPLOAD' AND (ki.source_file_id LIKE CONCAT('%', #{keyword}, '%')
|
||||
OR ki.relative_path LIKE CONCAT('%', #{keyword}, '%')))
|
||||
OR (ki.source_type = 'DATASET_FILE' AND (df.file_name LIKE CONCAT('%', #{keyword}, '%')
|
||||
OR ki.relative_path LIKE CONCAT('%', #{keyword}, '%')))
|
||||
OR ki.relative_path LIKE CONCAT('%', #{keyword}, '%'))))
|
||||
<if test="excludeConfidential">
|
||||
AND (ks.sensitivity IS NULL OR UPPER(TRIM(ks.sensitivity)) != 'CONFIDENTIAL')
|
||||
</if>
|
||||
ORDER BY ki.created_at DESC
|
||||
</script>
|
||||
""")
|
||||
IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, @Param("keyword") String keyword);
|
||||
IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, @Param("keyword") String keyword,
|
||||
@Param("excludeConfidential") boolean excludeConfidential);
|
||||
|
||||
@Select("""
|
||||
<script>
|
||||
SELECT COALESCE(SUM(df.file_size), 0)
|
||||
FROM t_dm_knowledge_items ki
|
||||
LEFT JOIN t_dm_dataset_files df ON ki.source_file_id = df.id
|
||||
WHERE ki.source_type = 'DATASET_FILE'
|
||||
<if test="setIds != null and setIds.size() > 0">
|
||||
AND ki.set_id IN
|
||||
<foreach collection="setIds" item="setId" open="(" separator="," close=")">
|
||||
#{setId}
|
||||
</foreach>
|
||||
</if>
|
||||
</script>
|
||||
""")
|
||||
Long sumDatasetFileSize();
|
||||
Long sumDatasetFileSizeBySetIds(@Param("setIds") List<String> setIds);
|
||||
}
|
||||
|
||||
@@ -24,8 +24,28 @@ public interface DatasetFileRepository extends IRepository<DatasetFile> {
|
||||
|
||||
List<DatasetFile> findAllByDatasetId(String datasetId);
|
||||
|
||||
/**
|
||||
* 查询数据集内“可见文件”(默认不包含历史归档版本)。
|
||||
* 约定:status 为 NULL 视为可见;status = ARCHIVED 视为历史版本。
|
||||
*/
|
||||
List<DatasetFile> findAllVisibleByDatasetId(String datasetId);
|
||||
|
||||
DatasetFile findByDatasetIdAndFileName(String datasetId, String fileName);
|
||||
|
||||
/**
|
||||
* 查询指定逻辑路径的最新版本(ACTIVE/NULL)。
|
||||
*/
|
||||
DatasetFile findLatestByDatasetIdAndLogicalPath(String datasetId, String logicalPath);
|
||||
|
||||
/**
|
||||
* 查询指定逻辑路径的所有文件(包括所有状态)
|
||||
*
|
||||
* @param datasetId 数据集ID
|
||||
* @param logicalPath 逻辑路径
|
||||
* @return 文件列表
|
||||
*/
|
||||
List<DatasetFile> findAllByDatasetIdAndLogicalPath(String datasetId, String logicalPath);
|
||||
|
||||
IPage<DatasetFile> findByCriteria(String datasetId, String fileType, String status, String name,
|
||||
Boolean hasAnnotation, IPage<DatasetFile> page);
|
||||
|
||||
|
||||
@@ -25,9 +25,11 @@ public interface DatasetRepository extends IRepository<Dataset> {
|
||||
|
||||
AllDatasetStatisticsResponse getAllDatasetStatistics();
|
||||
|
||||
IPage<Dataset> findByCriteria(IPage<Dataset> page, DatasetPagingQuery query);
|
||||
AllDatasetStatisticsResponse getAllDatasetStatisticsByCreatedBy(String createdBy);
|
||||
|
||||
IPage<Dataset> findByCriteria(IPage<Dataset> page, DatasetPagingQuery query, String createdBy);
|
||||
|
||||
long countByParentId(String parentDatasetId);
|
||||
|
||||
List<Dataset> findSimilarByTags(List<String> tagNames, String excludedDatasetId, int limit);
|
||||
List<Dataset> findSimilarByTags(List<String> tagNames, String excludedDatasetId, int limit, String createdBy);
|
||||
}
|
||||
|
||||
@@ -2,11 +2,11 @@ package com.datamate.datamanagement.infrastructure.persistence.repository;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.repository.IRepository;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.datamate.datamanagement.common.enums.KnowledgeSourceType;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeItem;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemPagingQuery;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeItemSearchResponse;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
@@ -19,13 +19,13 @@ public interface KnowledgeItemRepository extends IRepository<KnowledgeItem> {
|
||||
|
||||
List<KnowledgeItem> findAllBySetId(String setId);
|
||||
|
||||
long countBySourceTypes(List<KnowledgeSourceType> sourceTypes);
|
||||
long countBySourceTypesAndSetIds(List<KnowledgeSourceType> sourceTypes, List<String> setIds);
|
||||
|
||||
List<KnowledgeItem> findFileUploadItems();
|
||||
List<KnowledgeItem> findFileUploadItemsBySetIds(List<String> setIds);
|
||||
|
||||
IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, String keyword);
|
||||
IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, String keyword, boolean excludeConfidential);
|
||||
|
||||
Long sumDatasetFileSize();
|
||||
Long sumDatasetFileSizeBySetIds(List<String> setIds);
|
||||
|
||||
boolean existsBySetIdAndRelativePath(String setId, String relativePath);
|
||||
|
||||
|
||||
@@ -5,11 +5,18 @@ import com.baomidou.mybatisplus.extension.repository.IRepository;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
import com.datamate.datamanagement.interfaces.dto.KnowledgeSetPagingQuery;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 知识集仓储接口
|
||||
*/
|
||||
public interface KnowledgeSetRepository extends IRepository<KnowledgeSet> {
|
||||
KnowledgeSet findByName(String name);
|
||||
|
||||
IPage<KnowledgeSet> findByCriteria(IPage<KnowledgeSet> page, KnowledgeSetPagingQuery query);
|
||||
IPage<KnowledgeSet> findByCriteria(IPage<KnowledgeSet> page, KnowledgeSetPagingQuery query, String createdBy,
|
||||
boolean excludeConfidential);
|
||||
|
||||
long countByCriteria(KnowledgeSetPagingQuery query, String createdBy, boolean excludeConfidential);
|
||||
|
||||
List<String> listSetIdsByCriteria(KnowledgeSetPagingQuery query, String createdBy, boolean excludeConfidential);
|
||||
}
|
||||
|
||||
@@ -25,6 +25,8 @@ public class DatasetFileRepositoryImpl extends CrudRepository<DatasetFileMapper,
|
||||
private final DatasetFileMapper datasetFileMapper;
|
||||
private static final String ANNOTATION_EXISTS_SQL =
|
||||
"SELECT 1 FROM t_dm_annotation_results ar WHERE ar.file_id = t_dm_dataset_files.id";
|
||||
private static final String FILE_STATUS_ARCHIVED = "ARCHIVED";
|
||||
private static final String FILE_STATUS_ACTIVE = "ACTIVE";
|
||||
|
||||
@Override
|
||||
public Long countByDatasetId(String datasetId) {
|
||||
@@ -51,19 +53,59 @@ public class DatasetFileRepositoryImpl extends CrudRepository<DatasetFileMapper,
|
||||
return datasetFileMapper.findAllByDatasetId(datasetId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<DatasetFile> findAllVisibleByDatasetId(String datasetId) {
|
||||
return datasetFileMapper.selectList(new LambdaQueryWrapper<DatasetFile>()
|
||||
.eq(DatasetFile::getDatasetId, datasetId)
|
||||
.and(wrapper -> wrapper.isNull(DatasetFile::getStatus)
|
||||
.or()
|
||||
.ne(DatasetFile::getStatus, FILE_STATUS_ARCHIVED))
|
||||
.orderByDesc(DatasetFile::getUploadTime));
|
||||
}
|
||||
|
||||
@Override
|
||||
public DatasetFile findByDatasetIdAndFileName(String datasetId, String fileName) {
|
||||
return datasetFileMapper.findByDatasetIdAndFileName(datasetId, fileName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DatasetFile findLatestByDatasetIdAndLogicalPath(String datasetId, String logicalPath) {
|
||||
if (!StringUtils.hasText(datasetId) || !StringUtils.hasText(logicalPath)) {
|
||||
return null;
|
||||
}
|
||||
return datasetFileMapper.selectOne(new LambdaQueryWrapper<DatasetFile>()
|
||||
.eq(DatasetFile::getDatasetId, datasetId)
|
||||
.eq(DatasetFile::getLogicalPath, logicalPath)
|
||||
.and(wrapper -> wrapper.isNull(DatasetFile::getStatus)
|
||||
.or()
|
||||
.eq(DatasetFile::getStatus, FILE_STATUS_ACTIVE))
|
||||
.orderByDesc(DatasetFile::getVersion)
|
||||
.orderByDesc(DatasetFile::getUploadTime)
|
||||
.last("LIMIT 1"));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<DatasetFile> findAllByDatasetIdAndLogicalPath(String datasetId, String logicalPath) {
|
||||
return datasetFileMapper.findAllByDatasetIdAndLogicalPath(datasetId, logicalPath);
|
||||
}
|
||||
|
||||
public IPage<DatasetFile> findByCriteria(String datasetId, String fileType, String status, String name,
|
||||
Boolean hasAnnotation, IPage<DatasetFile> page) {
|
||||
return datasetFileMapper.selectPage(page, new LambdaQueryWrapper<DatasetFile>()
|
||||
LambdaQueryWrapper<DatasetFile> wrapper = new LambdaQueryWrapper<DatasetFile>()
|
||||
.eq(DatasetFile::getDatasetId, datasetId)
|
||||
.eq(StringUtils.hasText(fileType), DatasetFile::getFileType, fileType)
|
||||
.eq(StringUtils.hasText(status), DatasetFile::getStatus, status)
|
||||
.like(StringUtils.hasText(name), DatasetFile::getFileName, name)
|
||||
.exists(Boolean.TRUE.equals(hasAnnotation), ANNOTATION_EXISTS_SQL));
|
||||
.exists(Boolean.TRUE.equals(hasAnnotation), ANNOTATION_EXISTS_SQL);
|
||||
|
||||
if (StringUtils.hasText(status)) {
|
||||
wrapper.eq(DatasetFile::getStatus, status);
|
||||
} else {
|
||||
wrapper.and(visibility -> visibility.isNull(DatasetFile::getStatus)
|
||||
.or()
|
||||
.ne(DatasetFile::getStatus, FILE_STATUS_ARCHIVED));
|
||||
}
|
||||
|
||||
return datasetFileMapper.selectPage(page, wrapper);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -51,10 +51,34 @@ public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset
|
||||
|
||||
|
||||
@Override
|
||||
public IPage<Dataset> findByCriteria(IPage<Dataset> page, DatasetPagingQuery query) {
|
||||
public AllDatasetStatisticsResponse getAllDatasetStatisticsByCreatedBy(String createdBy) {
|
||||
List<Dataset> datasets = lambdaQuery()
|
||||
.eq(Dataset::getCreatedBy, createdBy)
|
||||
.list();
|
||||
long totalFiles = datasets.stream()
|
||||
.map(Dataset::getFileCount)
|
||||
.filter(java.util.Objects::nonNull)
|
||||
.mapToLong(Long::longValue)
|
||||
.sum();
|
||||
long totalSize = datasets.stream()
|
||||
.map(Dataset::getSizeBytes)
|
||||
.filter(java.util.Objects::nonNull)
|
||||
.mapToLong(Long::longValue)
|
||||
.sum();
|
||||
AllDatasetStatisticsResponse response = new AllDatasetStatisticsResponse();
|
||||
response.setTotalDatasets(datasets.size());
|
||||
response.setTotalFiles(totalFiles);
|
||||
response.setTotalSize(totalSize);
|
||||
return response;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public IPage<Dataset> findByCriteria(IPage<Dataset> page, DatasetPagingQuery query, String createdBy) {
|
||||
LambdaQueryWrapper<Dataset> wrapper = new LambdaQueryWrapper<Dataset>()
|
||||
.eq(query.getType() != null, Dataset::getDatasetType, query.getType())
|
||||
.eq(query.getStatus() != null, Dataset::getStatus, query.getStatus());
|
||||
.eq(query.getStatus() != null, Dataset::getStatus, query.getStatus())
|
||||
.eq(StringUtils.isNotBlank(createdBy), Dataset::getCreatedBy, createdBy);
|
||||
|
||||
if (query.getParentDatasetId() != null) {
|
||||
if (StringUtils.isBlank(query.getParentDatasetId())) {
|
||||
@@ -92,7 +116,7 @@ public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Dataset> findSimilarByTags(List<String> tagNames, String excludedDatasetId, int limit) {
|
||||
public List<Dataset> findSimilarByTags(List<String> tagNames, String excludedDatasetId, int limit, String createdBy) {
|
||||
if (limit <= 0 || tagNames == null || tagNames.isEmpty()) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
@@ -109,6 +133,9 @@ public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset
|
||||
if (StringUtils.isNotBlank(excludedDatasetId)) {
|
||||
wrapper.ne(Dataset::getId, excludedDatasetId.trim());
|
||||
}
|
||||
if (StringUtils.isNotBlank(createdBy)) {
|
||||
wrapper.eq(Dataset::getCreatedBy, createdBy);
|
||||
}
|
||||
wrapper.apply("tags IS NOT NULL AND JSON_VALID(tags) = 1 AND JSON_LENGTH(tags) > 0");
|
||||
wrapper.and(condition -> {
|
||||
boolean hasCondition = false;
|
||||
|
||||
@@ -61,26 +61,37 @@ public class KnowledgeItemRepositoryImpl extends CrudRepository<KnowledgeItemMap
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countBySourceTypes(List<KnowledgeSourceType> sourceTypes) {
|
||||
public long countBySourceTypesAndSetIds(List<KnowledgeSourceType> sourceTypes, List<String> setIds) {
|
||||
if (sourceTypes == null || sourceTypes.isEmpty() || setIds == null || setIds.isEmpty()) {
|
||||
return 0L;
|
||||
}
|
||||
return knowledgeItemMapper.selectCount(new LambdaQueryWrapper<KnowledgeItem>()
|
||||
.in(KnowledgeItem::getSourceType, sourceTypes));
|
||||
.in(KnowledgeItem::getSourceType, sourceTypes)
|
||||
.in(KnowledgeItem::getSetId, setIds));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<KnowledgeItem> findFileUploadItems() {
|
||||
public List<KnowledgeItem> findFileUploadItemsBySetIds(List<String> setIds) {
|
||||
if (setIds == null || setIds.isEmpty()) {
|
||||
return List.of();
|
||||
}
|
||||
return knowledgeItemMapper.selectList(new LambdaQueryWrapper<KnowledgeItem>()
|
||||
.eq(KnowledgeItem::getSourceType, KnowledgeSourceType.FILE_UPLOAD)
|
||||
.select(KnowledgeItem::getId, KnowledgeItem::getContent, KnowledgeItem::getSourceFileId));
|
||||
.in(KnowledgeItem::getSetId, setIds)
|
||||
.select(KnowledgeItem::getId, KnowledgeItem::getSetId, KnowledgeItem::getContent, KnowledgeItem::getSourceFileId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, String keyword) {
|
||||
return knowledgeItemMapper.searchFileItems(page, keyword);
|
||||
public IPage<KnowledgeItemSearchResponse> searchFileItems(IPage<?> page, String keyword, boolean excludeConfidential) {
|
||||
return knowledgeItemMapper.searchFileItems(page, keyword, excludeConfidential);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long sumDatasetFileSize() {
|
||||
return knowledgeItemMapper.sumDatasetFileSize();
|
||||
public Long sumDatasetFileSizeBySetIds(List<String> setIds) {
|
||||
if (setIds == null || setIds.isEmpty()) {
|
||||
return 0L;
|
||||
}
|
||||
return knowledgeItemMapper.sumDatasetFileSizeBySetIds(setIds);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -3,6 +3,7 @@ package com.datamate.datamanagement.infrastructure.persistence.repository.impl;
|
||||
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.repository.CrudRepository;
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.datamanagement.domain.model.knowledge.KnowledgeSet;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.mapper.KnowledgeSetMapper;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.KnowledgeSetRepository;
|
||||
@@ -11,6 +12,9 @@ import lombok.RequiredArgsConstructor;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 知识集仓储实现类
|
||||
*/
|
||||
@@ -25,24 +29,62 @@ public class KnowledgeSetRepositoryImpl extends CrudRepository<KnowledgeSetMappe
|
||||
}
|
||||
|
||||
@Override
|
||||
public IPage<KnowledgeSet> findByCriteria(IPage<KnowledgeSet> page, KnowledgeSetPagingQuery query) {
|
||||
LambdaQueryWrapper<KnowledgeSet> wrapper = new LambdaQueryWrapper<KnowledgeSet>()
|
||||
.eq(query.getStatus() != null, KnowledgeSet::getStatus, query.getStatus())
|
||||
.eq(StringUtils.isNotBlank(query.getDomain()), KnowledgeSet::getDomain, query.getDomain())
|
||||
.eq(StringUtils.isNotBlank(query.getBusinessLine()), KnowledgeSet::getBusinessLine, query.getBusinessLine())
|
||||
.eq(StringUtils.isNotBlank(query.getOwner()), KnowledgeSet::getOwner, query.getOwner())
|
||||
.eq(StringUtils.isNotBlank(query.getSensitivity()), KnowledgeSet::getSensitivity, query.getSensitivity())
|
||||
.eq(query.getSourceType() != null, KnowledgeSet::getSourceType, query.getSourceType())
|
||||
.ge(query.getValidFrom() != null, KnowledgeSet::getValidFrom, query.getValidFrom())
|
||||
.le(query.getValidTo() != null, KnowledgeSet::getValidTo, query.getValidTo());
|
||||
|
||||
if (StringUtils.isNotBlank(query.getKeyword())) {
|
||||
wrapper.and(w -> w.like(KnowledgeSet::getName, query.getKeyword())
|
||||
.or()
|
||||
.like(KnowledgeSet::getDescription, query.getKeyword()));
|
||||
public IPage<KnowledgeSet> findByCriteria(IPage<KnowledgeSet> page, KnowledgeSetPagingQuery query, String createdBy,
|
||||
boolean excludeConfidential) {
|
||||
LambdaQueryWrapper<KnowledgeSet> wrapper = buildCriteriaWrapper(query, createdBy, excludeConfidential);
|
||||
wrapper.orderByDesc(KnowledgeSet::getCreatedAt);
|
||||
return knowledgeSetMapper.selectPage(page, wrapper);
|
||||
}
|
||||
|
||||
for (String tagName : query.getTags()) {
|
||||
@Override
|
||||
public long countByCriteria(KnowledgeSetPagingQuery query, String createdBy, boolean excludeConfidential) {
|
||||
return knowledgeSetMapper.selectCount(buildCriteriaWrapper(query, createdBy, excludeConfidential));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> listSetIdsByCriteria(KnowledgeSetPagingQuery query, String createdBy, boolean excludeConfidential) {
|
||||
LambdaQueryWrapper<KnowledgeSet> wrapper = buildCriteriaWrapper(query, createdBy, excludeConfidential)
|
||||
.select(KnowledgeSet::getId)
|
||||
.orderByDesc(KnowledgeSet::getCreatedAt);
|
||||
List<KnowledgeSet> sets = knowledgeSetMapper.selectList(wrapper);
|
||||
if (sets == null || sets.isEmpty()) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
return sets.stream().map(KnowledgeSet::getId).filter(StringUtils::isNotBlank).toList();
|
||||
}
|
||||
|
||||
private LambdaQueryWrapper<KnowledgeSet> buildCriteriaWrapper(KnowledgeSetPagingQuery query,
|
||||
String createdBy,
|
||||
boolean excludeConfidential) {
|
||||
KnowledgeSetPagingQuery safeQuery = query == null ? new KnowledgeSetPagingQuery() : query;
|
||||
|
||||
LambdaQueryWrapper<KnowledgeSet> wrapper = new LambdaQueryWrapper<KnowledgeSet>()
|
||||
.eq(safeQuery.getStatus() != null, KnowledgeSet::getStatus, safeQuery.getStatus())
|
||||
.eq(StringUtils.isNotBlank(safeQuery.getDomain()), KnowledgeSet::getDomain, safeQuery.getDomain())
|
||||
.eq(StringUtils.isNotBlank(safeQuery.getBusinessLine()), KnowledgeSet::getBusinessLine, safeQuery.getBusinessLine())
|
||||
.eq(StringUtils.isNotBlank(safeQuery.getOwner()), KnowledgeSet::getOwner, safeQuery.getOwner())
|
||||
.eq(safeQuery.getSourceType() != null, KnowledgeSet::getSourceType, safeQuery.getSourceType())
|
||||
.ge(safeQuery.getValidFrom() != null, KnowledgeSet::getValidFrom, safeQuery.getValidFrom())
|
||||
.le(safeQuery.getValidTo() != null, KnowledgeSet::getValidTo, safeQuery.getValidTo())
|
||||
.eq(StringUtils.isNotBlank(createdBy), KnowledgeSet::getCreatedBy, createdBy);
|
||||
|
||||
if (queryHasSensitivity(safeQuery)) {
|
||||
wrapper.apply("UPPER(TRIM(sensitivity)) = {0}", normalizeSensitivity(safeQuery.getSensitivity()));
|
||||
}
|
||||
|
||||
if (excludeConfidential) {
|
||||
wrapper.and(w -> w.isNull(KnowledgeSet::getSensitivity)
|
||||
.or()
|
||||
.apply("UPPER(TRIM(sensitivity)) != {0}", ResourceAccessService.CONFIDENTIAL_SENSITIVITY));
|
||||
}
|
||||
|
||||
if (StringUtils.isNotBlank(safeQuery.getKeyword())) {
|
||||
wrapper.and(w -> w.like(KnowledgeSet::getName, safeQuery.getKeyword())
|
||||
.or()
|
||||
.like(KnowledgeSet::getDescription, safeQuery.getKeyword()));
|
||||
}
|
||||
|
||||
for (String tagName : safeQuery.getTags()) {
|
||||
wrapper.and(w ->
|
||||
w.apply("tags IS NOT NULL " +
|
||||
"AND JSON_VALID(tags) = 1 " +
|
||||
@@ -51,7 +93,15 @@ public class KnowledgeSetRepositoryImpl extends CrudRepository<KnowledgeSetMappe
|
||||
);
|
||||
}
|
||||
|
||||
wrapper.orderByDesc(KnowledgeSet::getCreatedAt);
|
||||
return knowledgeSetMapper.selectPage(page, wrapper);
|
||||
return wrapper;
|
||||
}
|
||||
|
||||
private boolean queryHasSensitivity(KnowledgeSetPagingQuery query) {
|
||||
String normalized = normalizeSensitivity(query.getSensitivity());
|
||||
return StringUtils.isNotBlank(normalized) && !"ALL".equals(normalized);
|
||||
}
|
||||
|
||||
private String normalizeSensitivity(String sensitivity) {
|
||||
return StringUtils.upperCase(StringUtils.trimToNull(sensitivity));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
package com.datamate.datamanagement.interfaces.rest;
|
||||
|
||||
import com.datamate.datamanagement.application.DatasetFileApplicationService;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
import org.springframework.web.bind.annotation.PutMapping;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
/**
|
||||
* 数据集上传控制器
|
||||
*/
|
||||
@Slf4j
|
||||
@RestController
|
||||
@RequiredArgsConstructor
|
||||
@RequestMapping("/data-management/datasets/upload")
|
||||
public class DatasetUploadController {
|
||||
|
||||
private final DatasetFileApplicationService datasetFileApplicationService;
|
||||
|
||||
/**
|
||||
* 取消上传
|
||||
*
|
||||
* @param reqId 预上传请求ID
|
||||
*/
|
||||
@PutMapping("/cancel-upload/{reqId}")
|
||||
public ResponseEntity<Void> cancelUpload(@PathVariable("reqId") String reqId) {
|
||||
datasetFileApplicationService.cancelUpload(reqId);
|
||||
return ResponseEntity.ok().build();
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@
|
||||
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
|
||||
<mapper namespace="com.datamate.datamanagement.infrastructure.persistence.mapper.DatasetFileMapper">
|
||||
<sql id="Base_Column_List">
|
||||
id, dataset_id, file_name, file_path, file_type, file_size, check_sum, tags, metadata, status,
|
||||
id, dataset_id, file_name, file_path, logical_path, version, file_type, file_size, check_sum, tags, metadata, status,
|
||||
upload_time, last_access_time, created_at, updated_at
|
||||
</sql>
|
||||
|
||||
@@ -39,13 +39,17 @@
|
||||
</select>
|
||||
|
||||
<select id="countByDatasetId" parameterType="string" resultType="long">
|
||||
SELECT COUNT(*) FROM t_dm_dataset_files WHERE dataset_id = #{datasetId}
|
||||
SELECT COUNT(*)
|
||||
FROM t_dm_dataset_files
|
||||
WHERE dataset_id = #{datasetId}
|
||||
AND (status IS NULL OR status <> 'ARCHIVED')
|
||||
</select>
|
||||
|
||||
<select id="countNonDerivedByDatasetId" parameterType="string" resultType="long">
|
||||
SELECT COUNT(*)
|
||||
FROM t_dm_dataset_files
|
||||
WHERE dataset_id = #{datasetId}
|
||||
AND (status IS NULL OR status <> 'ARCHIVED')
|
||||
AND (metadata IS NULL OR JSON_EXTRACT(metadata, '$.derived_from_file_id') IS NULL)
|
||||
</select>
|
||||
|
||||
@@ -54,16 +58,30 @@
|
||||
</select>
|
||||
|
||||
<select id="sumSizeByDatasetId" parameterType="string" resultType="long">
|
||||
SELECT COALESCE(SUM(file_size), 0) FROM t_dm_dataset_files WHERE dataset_id = #{datasetId}
|
||||
SELECT COALESCE(SUM(file_size), 0)
|
||||
FROM t_dm_dataset_files
|
||||
WHERE dataset_id = #{datasetId}
|
||||
AND (status IS NULL OR status <> 'ARCHIVED')
|
||||
</select>
|
||||
|
||||
<select id="findByDatasetIdAndFileName" resultType="com.datamate.datamanagement.domain.model.dataset.DatasetFile">
|
||||
SELECT <include refid="Base_Column_List"/>
|
||||
FROM t_dm_dataset_files
|
||||
WHERE dataset_id = #{datasetId} AND file_name = #{fileName}
|
||||
WHERE dataset_id = #{datasetId}
|
||||
AND file_name = #{fileName}
|
||||
AND (status IS NULL OR status <> 'ARCHIVED')
|
||||
ORDER BY version DESC, upload_time DESC
|
||||
LIMIT 1
|
||||
</select>
|
||||
|
||||
<select id="findAllByDatasetIdAndLogicalPath" resultType="com.datamate.datamanagement.domain.model.dataset.DatasetFile">
|
||||
SELECT <include refid="Base_Column_List"/>
|
||||
FROM t_dm_dataset_files
|
||||
WHERE dataset_id = #{datasetId}
|
||||
AND logical_path = #{logicalPath}
|
||||
ORDER BY version DESC, upload_time DESC
|
||||
</select>
|
||||
|
||||
<select id="findAllByDatasetId" parameterType="string"
|
||||
resultType="com.datamate.datamanagement.domain.model.dataset.DatasetFile">
|
||||
SELECT <include refid="Base_Column_List"/>
|
||||
@@ -87,22 +105,6 @@
|
||||
</select>
|
||||
|
||||
|
||||
<update id="update" parameterType="com.datamate.datamanagement.domain.model.dataset.DatasetFile">
|
||||
UPDATE t_dm_dataset_files
|
||||
SET file_name = #{fileName},
|
||||
file_path = #{filePath},
|
||||
file_type = #{fileType},
|
||||
file_size = #{fileSize},
|
||||
upload_time = #{uploadTime},
|
||||
last_access_time = #{lastAccessTime},
|
||||
status = #{status}
|
||||
WHERE id = #{id}
|
||||
</update>
|
||||
|
||||
<delete id="deleteById" parameterType="string">
|
||||
DELETE FROM t_dm_dataset_files WHERE id = #{id}
|
||||
</delete>
|
||||
|
||||
<update id="updateFilePathPrefix">
|
||||
UPDATE t_dm_dataset_files
|
||||
SET file_path = CONCAT(#{newPrefix}, SUBSTRING(file_path, LENGTH(#{oldPrefix}) + 1))
|
||||
@@ -126,6 +128,7 @@
|
||||
<foreach collection="datasetIds" item="datasetId" open="(" separator="," close=")">
|
||||
#{datasetId}
|
||||
</foreach>
|
||||
AND (status IS NULL OR status <> 'ARCHIVED')
|
||||
AND (metadata IS NULL OR JSON_EXTRACT(metadata, '$.derived_from_file_id') IS NULL)
|
||||
GROUP BY dataset_id
|
||||
</select>
|
||||
|
||||
@@ -139,10 +139,6 @@
|
||||
</where>
|
||||
</select>
|
||||
|
||||
<delete id="deleteById" parameterType="string">
|
||||
DELETE FROM t_dm_datasets WHERE id = #{id}
|
||||
</delete>
|
||||
|
||||
<select id="getAllDatasetStatistics" resultType="com.datamate.datamanagement.interfaces.dto.AllDatasetStatisticsResponse">
|
||||
SELECT
|
||||
(SELECT COUNT(*) FROM t_dm_datasets) AS total_datasets,
|
||||
|
||||
@@ -0,0 +1,147 @@
|
||||
package com.datamate.datamanagement.application;
|
||||
|
||||
import com.datamate.common.domain.service.FileService;
|
||||
import com.datamate.datamanagement.domain.model.dataset.Dataset;
|
||||
import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetFileRepository;
|
||||
import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetRepository;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.MessageDigest;
|
||||
import java.util.List;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.ArgumentMatchers.anyString;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class DatasetFileApplicationServiceVersioningTest {
|
||||
|
||||
@TempDir
|
||||
Path tempDir;
|
||||
|
||||
@Mock
|
||||
DatasetFileRepository datasetFileRepository;
|
||||
|
||||
@Mock
|
||||
DatasetRepository datasetRepository;
|
||||
|
||||
@Mock
|
||||
FileService fileService;
|
||||
|
||||
@Mock
|
||||
PdfTextExtractAsyncService pdfTextExtractAsyncService;
|
||||
|
||||
@Mock
|
||||
DatasetFilePreviewService datasetFilePreviewService;
|
||||
|
||||
@Test
|
||||
void copyFilesToDatasetDirWithSourceRoot_shouldArchiveOldFileAndCreateNewVersionWhenDuplicateLogicalPath()
|
||||
throws Exception {
|
||||
String datasetId = "dataset-1";
|
||||
|
||||
Path datasetRoot = tempDir.resolve("dataset-root");
|
||||
Files.createDirectories(datasetRoot);
|
||||
|
||||
Path sourceRoot = tempDir.resolve("source-root");
|
||||
Files.createDirectories(sourceRoot);
|
||||
|
||||
Path existingPath = datasetRoot.resolve("a.txt");
|
||||
Files.writeString(existingPath, "old-content", StandardCharsets.UTF_8);
|
||||
|
||||
Path incomingPath = sourceRoot.resolve("a.txt");
|
||||
Files.writeString(incomingPath, "new-content", StandardCharsets.UTF_8);
|
||||
|
||||
Dataset dataset = new Dataset();
|
||||
dataset.setId(datasetId);
|
||||
dataset.setPath(datasetRoot.toString());
|
||||
|
||||
DatasetFile oldRecord = DatasetFile.builder()
|
||||
.id("old-file-id")
|
||||
.datasetId(datasetId)
|
||||
.fileName("a.txt")
|
||||
.filePath(existingPath.toString())
|
||||
.logicalPath(null)
|
||||
.version(null)
|
||||
.status(null)
|
||||
.fileSize(Files.size(existingPath))
|
||||
.build();
|
||||
|
||||
when(datasetRepository.getById(datasetId)).thenReturn(dataset);
|
||||
when(datasetFileRepository.findAllVisibleByDatasetId(datasetId)).thenReturn(List.of(oldRecord));
|
||||
when(datasetFileRepository.findLatestByDatasetIdAndLogicalPath(anyString(), anyString())).thenReturn(null);
|
||||
|
||||
DatasetFileApplicationService service = new DatasetFileApplicationService(
|
||||
datasetFileRepository,
|
||||
datasetRepository,
|
||||
fileService,
|
||||
pdfTextExtractAsyncService,
|
||||
datasetFilePreviewService
|
||||
);
|
||||
|
||||
List<DatasetFile> copied = service.copyFilesToDatasetDirWithSourceRoot(
|
||||
datasetId,
|
||||
sourceRoot,
|
||||
List.of(incomingPath.toString())
|
||||
);
|
||||
|
||||
assertThat(copied).hasSize(1);
|
||||
assertThat(Files.readString(existingPath, StandardCharsets.UTF_8)).isEqualTo("new-content");
|
||||
|
||||
String logicalPathHash = sha256Hex("a.txt");
|
||||
Path archivedPath = datasetRoot
|
||||
.resolve(".datamate")
|
||||
.resolve("versions")
|
||||
.resolve(logicalPathHash)
|
||||
.resolve("v1")
|
||||
.resolve("old-file-id__a.txt")
|
||||
.toAbsolutePath()
|
||||
.normalize();
|
||||
|
||||
assertThat(Files.exists(archivedPath)).isTrue();
|
||||
assertThat(Files.readString(archivedPath, StandardCharsets.UTF_8)).isEqualTo("old-content");
|
||||
|
||||
ArgumentCaptor<DatasetFile> archivedCaptor = ArgumentCaptor.forClass(DatasetFile.class);
|
||||
verify(datasetFileRepository).updateById(archivedCaptor.capture());
|
||||
DatasetFile archivedRecord = archivedCaptor.getValue();
|
||||
assertThat(archivedRecord.getId()).isEqualTo("old-file-id");
|
||||
assertThat(archivedRecord.getStatus()).isEqualTo("ARCHIVED");
|
||||
assertThat(archivedRecord.getLogicalPath()).isEqualTo("a.txt");
|
||||
assertThat(archivedRecord.getVersion()).isEqualTo(1L);
|
||||
assertThat(Paths.get(archivedRecord.getFilePath()).toAbsolutePath().normalize()).isEqualTo(archivedPath);
|
||||
|
||||
ArgumentCaptor<DatasetFile> createdCaptor = ArgumentCaptor.forClass(DatasetFile.class);
|
||||
verify(datasetFileRepository).saveOrUpdate(createdCaptor.capture());
|
||||
DatasetFile newRecord = createdCaptor.getValue();
|
||||
assertThat(newRecord.getId()).isNotEqualTo("old-file-id");
|
||||
assertThat(newRecord.getStatus()).isEqualTo("ACTIVE");
|
||||
assertThat(newRecord.getLogicalPath()).isEqualTo("a.txt");
|
||||
assertThat(newRecord.getVersion()).isEqualTo(2L);
|
||||
assertThat(Paths.get(newRecord.getFilePath()).toAbsolutePath().normalize()).isEqualTo(existingPath.toAbsolutePath().normalize());
|
||||
}
|
||||
|
||||
private static String sha256Hex(String value) {
|
||||
try {
|
||||
MessageDigest digest = MessageDigest.getInstance("SHA-256");
|
||||
byte[] hashed = digest.digest((value == null ? "" : value).getBytes(StandardCharsets.UTF_8));
|
||||
StringBuilder builder = new StringBuilder(hashed.length * 2);
|
||||
for (byte b : hashed) {
|
||||
builder.append(String.format("%02x", b));
|
||||
}
|
||||
return builder.toString();
|
||||
} catch (Exception e) {
|
||||
return Integer.toHexString((value == null ? "" : value).hashCode());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
114
backend/services/knowledge-graph-service/pom.xml
Normal file
114
backend/services/knowledge-graph-service/pom.xml
Normal file
@@ -0,0 +1,114 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven module descriptor for the Knowledge Graph service:
     Spring Data Neo4j backed entity/relation management and graph queries. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <!-- Inherits dependency/plugin management from the aggregated services parent. -->
    <parent>
        <groupId>com.datamate</groupId>
        <artifactId>services</artifactId>
        <version>1.0.0-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>

    <artifactId>knowledge-graph-service</artifactId>
    <name>Knowledge Graph Service</name>
    <description>知识图谱服务 - 基于Neo4j的实体关系管理与图谱查询</description>

    <dependencies>
        <!-- Shared domain model / infrastructure of this monorepo. -->
        <dependency>
            <groupId>com.datamate</groupId>
            <artifactId>domain-common</artifactId>
            <version>${project.version}</version>
        </dependency>

        <!-- Spring Data Neo4j -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-neo4j</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <!-- Redis is used for the graph query/entity caches. -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-redis</artifactId>
        </dependency>

        <dependency>
            <groupId>com.mysql</groupId>
            <artifactId>mysql-connector-j</artifactId>
            <version>${mysql.version}</version>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>

        <!-- OpenAPI UI + generated-DTO support. -->
        <dependency>
            <groupId>org.springdoc</groupId>
            <artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
        </dependency>
        <dependency>
            <groupId>org.openapitools</groupId>
            <artifactId>jackson-databind-nullable</artifactId>
        </dependency>
        <dependency>
            <groupId>jakarta.validation</groupId>
            <artifactId>jakarta.validation-api</artifactId>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <configuration>
                    <!-- NOTE(review): 'arguments' is a list-valued parameter of the
                         spring-boot:run goal (expects <argument> children); a bare
                         'true' here looks unintentional — confirm against the plugin
                         docs or remove. -->
                    <arguments>true</arguments>
                    <!-- Attach the repackaged boot jar with the 'exec' classifier so
                         the plain jar stays consumable as a dependency. -->
                    <classifier>exec</classifier>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.11.0</version>
                <configuration>
                    <source>${maven.compiler.source}</source>
                    <target>${maven.compiler.target}</target>
                    <!-- Lombok must run before MapStruct; the binding artifact wires
                         the two annotation processors together. -->
                    <annotationProcessorPaths>
                        <path>
                            <groupId>org.projectlombok</groupId>
                            <artifactId>lombok</artifactId>
                            <version>${lombok.version}</version>
                        </path>
                        <path>
                            <groupId>org.projectlombok</groupId>
                            <artifactId>lombok-mapstruct-binding</artifactId>
                            <version>${lombok-mapstruct-binding.version}</version>
                        </path>
                        <path>
                            <groupId>org.mapstruct</groupId>
                            <artifactId>mapstruct-processor</artifactId>
                            <version>${mapstruct.version}</version>
                        </path>
                    </annotationProcessorPaths>
                    <compilerArgs>
                        <!-- Keep parameter names for Spring binding/reflection. -->
                        <arg>-parameters</arg>
                        <!-- Generated MapStruct mappers become Spring beans. -->
                        <arg>-Amapstruct.defaultComponentModel=spring</arg>
                    </compilerArgs>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>3.2.5</version>
            </plugin>
        </plugins>
    </build>
</project>
|
||||
@@ -0,0 +1,28 @@
|
||||
package com.datamate.knowledgegraph;

import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.neo4j.repository.config.EnableNeo4jRepositories;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.web.client.RestTemplate;

import java.time.Duration;

/**
 * Spring configuration root for the knowledge-graph service.
 *
 * <p>Scans the service's own packages plus the shared auth package, enables
 * Neo4j repositories for the domain repository package, and turns on
 * scheduled-task support.
 */
@Configuration
@ComponentScan(basePackages = {"com.datamate.knowledgegraph", "com.datamate.common.auth"})
@EnableNeo4jRepositories(basePackages = "com.datamate.knowledgegraph.domain.repository")
@EnableScheduling
public class KnowledgeGraphServiceConfiguration {

    /**
     * {@link RestTemplate} used by graph-sync calls; connect/read timeouts are
     * taken (in milliseconds) from {@link KnowledgeGraphProperties.Sync}.
     */
    @Bean("kgRestTemplate")
    public RestTemplate kgRestTemplate(RestTemplateBuilder builder, KnowledgeGraphProperties properties) {
        KnowledgeGraphProperties.Sync sync = properties.getSync();
        Duration connectTimeout = Duration.ofMillis(sync.getConnectTimeout());
        Duration readTimeout = Duration.ofMillis(sync.getReadTimeout());
        return builder.connectTimeout(connectTimeout).readTimeout(readTimeout).build();
    }
}
|
||||
@@ -0,0 +1,219 @@
|
||||
package com.datamate.knowledgegraph.application;

import com.datamate.common.infrastructure.exception.BusinessException;
import com.datamate.common.infrastructure.exception.SystemErrorCode;
import com.datamate.common.interfaces.PagedResponse;
import com.datamate.knowledgegraph.domain.model.EditReview;
import com.datamate.knowledgegraph.domain.repository.EditReviewRepository;
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
import com.datamate.knowledgegraph.interfaces.dto.*;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;
import java.util.List;
import java.util.regex.Pattern;

/**
 * Edit-review workflow service.
 *
 * <p>Supports submitting, approving, rejecting and listing edit reviews for a
 * knowledge graph. When a review is approved, the corresponding entity/relation
 * CRUD service is invoked to apply the requested change; the change and the
 * status update share one transaction, so a failed change leaves the review
 * PENDING.
 */
@Service
@Slf4j
@RequiredArgsConstructor
public class EditReviewService {

    /** Review lifecycle states persisted in {@code EditReview.status}. */
    private static final String STATUS_PENDING = "PENDING";
    private static final String STATUS_APPROVED = "APPROVED";
    private static final String STATUS_REJECTED = "REJECTED";

    /** Upper bound on the pagination offset, guarding against deep paging. */
    private static final long MAX_SKIP = 100_000L;

    /** Strict UUID shape check for graph ids (also blocks Cypher injection). */
    private static final Pattern UUID_PATTERN = Pattern.compile(
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
    );
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private final EditReviewRepository reviewRepository;
    private final GraphEntityService entityService;
    private final GraphRelationService relationService;

    /**
     * Records a new review in PENDING state.
     *
     * @param graphId     target graph (must be a UUID)
     * @param request     operation type, target ids and JSON payload
     * @param submittedBy user submitting the review
     * @return the persisted review as a view object
     */
    @Transactional
    public EditReviewVO submitReview(String graphId, SubmitReviewRequest request, String submittedBy) {
        validateGraphId(graphId);

        EditReview review = EditReview.builder()
                .graphId(graphId)
                .operationType(request.getOperationType())
                .entityId(request.getEntityId())
                .relationId(request.getRelationId())
                .payload(request.getPayload())
                .status(STATUS_PENDING)
                .submittedBy(submittedBy)
                .build();

        EditReview saved = reviewRepository.save(review);
        log.info("Review submitted: id={}, graphId={}, type={}, by={}",
                saved.getId(), graphId, request.getOperationType(), submittedBy);
        return toVO(saved);
    }

    /**
     * Approves a PENDING review: applies the requested change, then marks the
     * review APPROVED. Runs in one transaction so a failed change does not
     * leave an APPROVED-but-unapplied review.
     *
     * @throws BusinessException if the review is missing or already processed
     */
    @Transactional
    public EditReviewVO approveReview(String graphId, String reviewId, String reviewedBy, String comment) {
        validateGraphId(graphId);
        EditReview review = loadPendingReview(graphId, reviewId);

        // Apply the change first; any failure rolls back the whole transaction.
        applyChange(review);

        finishReview(review, STATUS_APPROVED, reviewedBy, comment);
        log.info("Review approved: id={}, graphId={}, type={}, by={}",
                reviewId, graphId, review.getOperationType(), reviewedBy);
        return toVO(review);
    }

    /**
     * Rejects a PENDING review without applying its change.
     *
     * @throws BusinessException if the review is missing or already processed
     */
    @Transactional
    public EditReviewVO rejectReview(String graphId, String reviewId, String reviewedBy, String comment) {
        validateGraphId(graphId);
        EditReview review = loadPendingReview(graphId, reviewId);

        finishReview(review, STATUS_REJECTED, reviewedBy, comment);
        log.info("Review rejected: id={}, graphId={}, type={}, by={}",
                reviewId, graphId, review.getOperationType(), reviewedBy);
        return toVO(review);
    }

    /** Lists PENDING reviews of a graph, paged (page clamped to >= 0, size to 1..200). */
    public PagedResponse<EditReviewVO> listPendingReviews(String graphId, int page, int size) {
        validateGraphId(graphId);

        int safePage = clampPage(page);
        int safeSize = clampSize(size);
        long skip = offsetOf(safePage, safeSize);

        List<EditReview> reviews = reviewRepository.findPendingByGraphId(graphId, skip, safeSize);
        long total = reviewRepository.countPendingByGraphId(graphId);

        List<EditReviewVO> content = reviews.stream().map(EditReviewService::toVO).toList();
        return PagedResponse.of(content, safePage, total, totalPagesOf(total, safeSize));
    }

    /**
     * Lists reviews of a graph filtered by status, paged.
     *
     * @param status status filter passed through to the repository
     */
    public PagedResponse<EditReviewVO> listReviews(String graphId, String status, int page, int size) {
        validateGraphId(graphId);

        int safePage = clampPage(page);
        int safeSize = clampSize(size);
        long skip = offsetOf(safePage, safeSize);

        List<EditReview> reviews = reviewRepository.findByGraphId(graphId, status, skip, safeSize);
        long total = reviewRepository.countByGraphId(graphId, status);

        List<EditReviewVO> content = reviews.stream().map(EditReviewService::toVO).toList();
        return PagedResponse.of(content, safePage, total, totalPagesOf(total, safeSize));
    }

    // -----------------------------------------------------------------------
    // Applying changes
    // -----------------------------------------------------------------------

    /**
     * Dispatches the review's operation to the entity/relation service,
     * deserialising the JSON payload into the matching request DTO.
     *
     * @throws BusinessException on unknown operation type or unparseable payload
     */
    private void applyChange(EditReview review) {
        String graphId = review.getGraphId();
        String type = review.getOperationType();

        try {
            switch (type) {
                case "CREATE_ENTITY" -> {
                    CreateEntityRequest req = MAPPER.readValue(review.getPayload(), CreateEntityRequest.class);
                    entityService.createEntity(graphId, req);
                }
                case "UPDATE_ENTITY" -> {
                    UpdateEntityRequest req = MAPPER.readValue(review.getPayload(), UpdateEntityRequest.class);
                    entityService.updateEntity(graphId, review.getEntityId(), req);
                }
                case "DELETE_ENTITY" -> entityService.deleteEntity(graphId, review.getEntityId());
                case "BATCH_DELETE_ENTITY" -> {
                    BatchDeleteRequest req = MAPPER.readValue(review.getPayload(), BatchDeleteRequest.class);
                    entityService.batchDeleteEntities(graphId, req.getIds());
                }
                case "CREATE_RELATION" -> {
                    CreateRelationRequest req = MAPPER.readValue(review.getPayload(), CreateRelationRequest.class);
                    relationService.createRelation(graphId, req);
                }
                case "UPDATE_RELATION" -> {
                    UpdateRelationRequest req = MAPPER.readValue(review.getPayload(), UpdateRelationRequest.class);
                    relationService.updateRelation(graphId, review.getRelationId(), req);
                }
                case "DELETE_RELATION" -> relationService.deleteRelation(graphId, review.getRelationId());
                case "BATCH_DELETE_RELATION" -> {
                    BatchDeleteRequest req = MAPPER.readValue(review.getPayload(), BatchDeleteRequest.class);
                    relationService.batchDeleteRelations(graphId, req.getIds());
                }
                default -> throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "未知操作类型: " + type);
            }
        } catch (JsonProcessingException e) {
            throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "变更载荷解析失败: " + e.getMessage());
        }
    }

    // -----------------------------------------------------------------------
    // Internal helpers
    // -----------------------------------------------------------------------

    /** Loads a review and verifies it is still PENDING. */
    private EditReview loadPendingReview(String graphId, String reviewId) {
        EditReview review = reviewRepository.findById(reviewId, graphId)
                .orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.REVIEW_NOT_FOUND));
        if (!STATUS_PENDING.equals(review.getStatus())) {
            throw BusinessException.of(KnowledgeGraphErrorCode.REVIEW_ALREADY_PROCESSED);
        }
        return review;
    }

    /** Stamps the reviewer decision onto the review and persists it. */
    private void finishReview(EditReview review, String status, String reviewedBy, String comment) {
        review.setStatus(status);
        review.setReviewedBy(reviewedBy);
        review.setReviewComment(comment);
        review.setReviewedAt(LocalDateTime.now());
        reviewRepository.save(review);
    }

    /** Clamps a requested page index to a non-negative value. */
    private static int clampPage(int page) {
        return Math.max(0, page);
    }

    /** Clamps a requested page size into [1, 200]. */
    private static int clampSize(int size) {
        return Math.max(1, Math.min(size, 200));
    }

    /** Computes the row offset, rejecting offsets beyond {@link #MAX_SKIP}. */
    private static long offsetOf(int safePage, int safeSize) {
        long skip = (long) safePage * safeSize;
        if (skip > MAX_SKIP) {
            throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
        }
        return skip;
    }

    /** Ceiling division of total rows by page size (0 when size is 0). */
    private static long totalPagesOf(long total, int safeSize) {
        return safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
    }

    // -----------------------------------------------------------------------
    // Mapping
    // -----------------------------------------------------------------------

    /** Maps a domain review to its API view object. */
    private static EditReviewVO toVO(EditReview review) {
        return EditReviewVO.builder()
                .id(review.getId())
                .graphId(review.getGraphId())
                .operationType(review.getOperationType())
                .entityId(review.getEntityId())
                .relationId(review.getRelationId())
                .payload(review.getPayload())
                .status(review.getStatus())
                .submittedBy(review.getSubmittedBy())
                .reviewedBy(review.getReviewedBy())
                .reviewComment(review.getReviewComment())
                .createdAt(review.getCreatedAt())
                .reviewedAt(review.getReviewedAt())
                .build();
    }

    /** Rejects null or non-UUID graph ids before any repository access. */
    private void validateGraphId(String graphId) {
        if (graphId == null || !UUID_PATTERN.matcher(graphId).matches()) {
            throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "graphId 格式无效");
        }
    }
}
|
||||
@@ -0,0 +1,216 @@
|
||||
package com.datamate.knowledgegraph.application;

import com.datamate.common.infrastructure.exception.BusinessException;
import com.datamate.common.infrastructure.exception.SystemErrorCode;
import com.datamate.common.interfaces.PagedResponse;
import com.datamate.knowledgegraph.domain.model.GraphEntity;
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
import com.datamate.knowledgegraph.infrastructure.cache.RedisCacheConfig;
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
import com.datamate.knowledgegraph.interfaces.dto.CreateEntityRequest;
import com.datamate.knowledgegraph.interfaces.dto.UpdateEntityRequest;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

/**
 * CRUD and paged/keyword query service for graph entities, with Redis-backed
 * read caching and explicit cache eviction on writes.
 */
@Service
@Slf4j
@RequiredArgsConstructor
public class GraphEntityService {

    /** Pagination offset ceiling, preventing deep-paging degradation in Neo4j. */
    private static final long MAX_SKIP = 100_000L;

    /** Strict UUID shape check for graph ids (also blocks Cypher injection). */
    private static final Pattern UUID_PATTERN = Pattern.compile(
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
    );

    private final GraphEntityRepository entityRepository;
    private final KnowledgeGraphProperties properties;
    private final GraphCacheService cacheService;

    /**
     * Creates an entity in the given graph. Confidence defaults to 1.0 when
     * absent; created/updated timestamps are set to now. Entity and search
     * caches for the graph are evicted afterwards.
     */
    @Transactional
    public GraphEntity createEntity(String graphId, CreateEntityRequest request) {
        validateGraphId(graphId);
        GraphEntity entity = GraphEntity.builder()
                .name(request.getName())
                .type(request.getType())
                .description(request.getDescription())
                .aliases(request.getAliases())
                .properties(request.getProperties())
                .sourceId(request.getSourceId())
                .sourceType(request.getSourceType())
                .graphId(graphId)
                .confidence(request.getConfidence() != null ? request.getConfidence() : 1.0)
                .createdAt(LocalDateTime.now())
                .updatedAt(LocalDateTime.now())
                .build();
        GraphEntity saved = entityRepository.save(entity);
        cacheService.evictEntityCaches(graphId, saved.getId());
        cacheService.evictSearchCaches(graphId);
        return saved;
    }

    /**
     * Fetches one entity by id within a graph (result cached).
     *
     * @throws BusinessException if the entity does not exist in the graph
     */
    @Cacheable(value = RedisCacheConfig.CACHE_ENTITIES,
            key = "T(com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService).cacheKey(#graphId, #entityId)",
            unless = "#result == null",
            cacheManager = "knowledgeGraphCacheManager")
    public GraphEntity getEntity(String graphId, String entityId) {
        validateGraphId(graphId);
        return entityRepository.findByIdAndGraphId(entityId, graphId)
                .orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.ENTITY_NOT_FOUND));
    }

    /** Lists all entities of a graph (result cached under a per-graph 'list' key). */
    @Cacheable(value = RedisCacheConfig.CACHE_ENTITIES,
            key = "T(com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService).cacheKey(#graphId, 'list')",
            cacheManager = "knowledgeGraphCacheManager")
    public List<GraphEntity> listEntities(String graphId) {
        validateGraphId(graphId);
        return entityRepository.findByGraphId(graphId);
    }

    /** Returns entities whose name contains the given substring (uncached). */
    public List<GraphEntity> searchEntities(String graphId, String name) {
        validateGraphId(graphId);
        return entityRepository.findByGraphIdAndNameContaining(graphId, name);
    }

    /** Returns entities of a given type within the graph (uncached). */
    public List<GraphEntity> listEntitiesByType(String graphId, String type) {
        validateGraphId(graphId);
        return entityRepository.findByGraphIdAndType(graphId, type);
    }

    // -----------------------------------------------------------------------
    // Paged queries (page clamped to >= 0, size to 1..200, offset <= MAX_SKIP)
    // -----------------------------------------------------------------------

    /** Pages over all entities of a graph. */
    public PagedResponse<GraphEntity> listEntitiesPaged(String graphId, int page, int size) {
        validateGraphId(graphId);
        int safePage = clampPage(page);
        int safeSize = clampSize(size);
        long skip = offsetOf(safePage, safeSize);
        List<GraphEntity> entities = entityRepository.findByGraphIdPaged(graphId, skip, safeSize);
        long total = entityRepository.countByGraphId(graphId);
        return PagedResponse.of(entities, safePage, total, totalPagesOf(total, safeSize));
    }

    /** Pages over entities of a given type. */
    public PagedResponse<GraphEntity> listEntitiesByTypePaged(String graphId, String type, int page, int size) {
        validateGraphId(graphId);
        int safePage = clampPage(page);
        int safeSize = clampSize(size);
        long skip = offsetOf(safePage, safeSize);
        List<GraphEntity> entities = entityRepository.findByGraphIdAndTypePaged(graphId, type, skip, safeSize);
        long total = entityRepository.countByGraphIdAndType(graphId, type);
        return PagedResponse.of(entities, safePage, total, totalPagesOf(total, safeSize));
    }

    /** Pages over entities whose name contains the keyword. */
    public PagedResponse<GraphEntity> searchEntitiesPaged(String graphId, String keyword, int page, int size) {
        validateGraphId(graphId);
        int safePage = clampPage(page);
        int safeSize = clampSize(size);
        long skip = offsetOf(safePage, safeSize);
        List<GraphEntity> entities = entityRepository.findByGraphIdAndNameContainingPaged(graphId, keyword, skip, safeSize);
        long total = entityRepository.countByGraphIdAndNameContaining(graphId, keyword);
        return PagedResponse.of(entities, safePage, total, totalPagesOf(total, safeSize));
    }

    /**
     * Partially updates an entity: only non-null request fields are applied.
     * Evicts the entity and search caches afterwards.
     *
     * <p>NOTE(review): the internal {@code getEntity} call is a self-invocation,
     * which bypasses the Spring cache proxy — the @Cacheable lookup is not used
     * here; confirm this is intended.
     */
    @Transactional
    public GraphEntity updateEntity(String graphId, String entityId, UpdateEntityRequest request) {
        validateGraphId(graphId);
        GraphEntity entity = getEntity(graphId, entityId);
        if (request.getName() != null) {
            entity.setName(request.getName());
        }
        if (request.getDescription() != null) {
            entity.setDescription(request.getDescription());
        }
        if (request.getAliases() != null) {
            entity.setAliases(request.getAliases());
        }
        if (request.getProperties() != null) {
            entity.setProperties(request.getProperties());
        }
        if (request.getConfidence() != null) {
            entity.setConfidence(request.getConfidence());
        }
        entity.setUpdatedAt(LocalDateTime.now());
        GraphEntity saved = entityRepository.save(entity);
        cacheService.evictEntityCaches(graphId, entityId);
        cacheService.evictSearchCaches(graphId);
        return saved;
    }

    /**
     * Deletes an entity (404s via getEntity if absent) and evicts its caches.
     */
    @Transactional
    public void deleteEntity(String graphId, String entityId) {
        validateGraphId(graphId);
        GraphEntity entity = getEntity(graphId, entityId);
        entityRepository.delete(entity);
        cacheService.evictEntityCaches(graphId, entityId);
        cacheService.evictSearchCaches(graphId);
    }

    /**
     * Returns neighbors of an entity up to {@code depth} hops; depth and limit
     * are clamped to the configured maxima.
     */
    public List<GraphEntity> getNeighbors(String graphId, String entityId, int depth, int limit) {
        validateGraphId(graphId);
        int clampedDepth = Math.max(1, Math.min(depth, properties.getMaxDepth()));
        int clampedLimit = Math.max(1, Math.min(limit, properties.getMaxNodesPerQuery()));
        return entityRepository.findNeighbors(graphId, entityId, clampedDepth, clampedLimit);
    }

    /**
     * Best-effort batch delete: failures on individual ids are logged and
     * collected, never aborting the batch.
     *
     * @return map with keys "deleted" (count), "total" (requested) and
     *         "failedIds" (ids that could not be deleted)
     */
    @Transactional
    public Map<String, Object> batchDeleteEntities(String graphId, List<String> entityIds) {
        validateGraphId(graphId);
        int deleted = 0;
        List<String> failedIds = new ArrayList<>();
        for (String entityId : entityIds) {
            try {
                deleteEntity(graphId, entityId);
                deleted++;
            } catch (Exception e) {
                log.warn("Batch delete: failed to delete entity {}: {}", entityId, e.getMessage());
                failedIds.add(entityId);
            }
        }
        return Map.of(
                "deleted", deleted,
                "total", entityIds.size(),
                "failedIds", failedIds
        );
    }

    /** Counts entities in a graph. */
    public long countEntities(String graphId) {
        validateGraphId(graphId);
        return entityRepository.countByGraphId(graphId);
    }

    // -----------------------------------------------------------------------
    // Internal helpers
    // -----------------------------------------------------------------------

    /** Clamps a requested page index to a non-negative value. */
    private static int clampPage(int page) {
        return Math.max(0, page);
    }

    /** Clamps a requested page size into [1, 200]. */
    private static int clampSize(int size) {
        return Math.max(1, Math.min(size, 200));
    }

    /** Computes the row offset, rejecting offsets beyond {@link #MAX_SKIP}. */
    private static long offsetOf(int safePage, int safeSize) {
        long skip = (long) safePage * safeSize;
        if (skip > MAX_SKIP) {
            throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
        }
        return skip;
    }

    /** Ceiling division of total rows by page size (0 when size is 0). */
    private static long totalPagesOf(long total, int safeSize) {
        return safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
    }

    /**
     * Validates the graphId is a well-formed UUID.
     * Prevents maliciously crafted graph ids from reaching Cypher queries.
     */
    private void validateGraphId(String graphId) {
        if (graphId == null || !UUID_PATTERN.matcher(graphId).matches()) {
            throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "graphId 格式无效");
        }
    }
}
|
||||
@@ -0,0 +1,990 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.RedisCacheConfig;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.*;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.neo4j.driver.Driver;
|
||||
import org.neo4j.driver.Record;
|
||||
import org.neo4j.driver.Session;
|
||||
import org.neo4j.driver.TransactionConfig;
|
||||
import org.neo4j.driver.Value;
|
||||
import org.neo4j.driver.types.MapAccessor;
|
||||
import org.springframework.cache.annotation.Cacheable;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* 知识图谱查询服务。
|
||||
* <p>
|
||||
* 提供图遍历(N 跳邻居、最短路径、所有路径、子图提取、子图导出)和全文搜索功能。
|
||||
* 使用 {@link Neo4jClient} 执行复杂 Cypher 查询。
|
||||
* <p>
|
||||
* 查询结果根据用户权限进行过滤:
|
||||
* <ul>
|
||||
* <li>管理员:不过滤,看到全部数据</li>
|
||||
* <li>普通用户:按 {@code created_by} 过滤,只看到自己创建的业务实体;
|
||||
* 结构型实体(User、Org、Field 等无 created_by 的实体)对所有用户可见</li>
|
||||
* </ul>
|
||||
*/
|
||||
@Service
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class GraphQueryService {
|
||||
|
||||
private static final String REL_TYPE = "RELATED_TO";
|
||||
private static final long MAX_SKIP = 100_000L;
|
||||
|
||||
/** 结构型实体类型白名单:对所有用户可见,不按 created_by 过滤 */
|
||||
private static final Set<String> STRUCTURAL_ENTITY_TYPES = Set.of("User", "Org", "Field");
|
||||
|
||||
private static final Pattern UUID_PATTERN = Pattern.compile(
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
|
||||
);
|
||||
|
||||
private final Neo4jClient neo4jClient;
|
||||
private final Driver neo4jDriver;
|
||||
private final GraphEntityRepository entityRepository;
|
||||
private final KnowledgeGraphProperties properties;
|
||||
private final ResourceAccessService resourceAccessService;
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// N 跳邻居
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 查询实体的 N 跳邻居,返回邻居节点和连接边。
|
||||
*
|
||||
* @param depth 跳数(1-3,由配置上限约束)
|
||||
* @param limit 返回节点数上限
|
||||
*/
|
||||
@Cacheable(value = RedisCacheConfig.CACHE_QUERIES,
|
||||
key = "T(com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService).cacheKey(#graphId, #entityId, #depth, #limit, @resourceAccessService.resolveOwnerFilterUserId(), @resourceAccessService.canViewConfidential())",
|
||||
cacheManager = "knowledgeGraphCacheManager")
|
||||
public SubgraphVO getNeighborGraph(String graphId, String entityId, int depth, int limit) {
|
||||
validateGraphId(graphId);
|
||||
String filterUserId = resolveOwnerFilter();
|
||||
boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();
|
||||
|
||||
// 校验实体存在 + 权限
|
||||
GraphEntity startEntity = entityRepository.findByIdAndGraphId(entityId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.ENTITY_NOT_FOUND));
|
||||
|
||||
if (filterUserId != null) {
|
||||
assertEntityAccess(startEntity, filterUserId, excludeConfidential);
|
||||
}
|
||||
|
||||
int clampedDepth = Math.max(1, Math.min(depth, properties.getMaxDepth()));
|
||||
int clampedLimit = Math.max(1, Math.min(limit, properties.getMaxNodesPerQuery()));
|
||||
|
||||
// 路径级全节点权限过滤(与 getShortestPath 一致)
|
||||
String permFilter = "";
|
||||
if (filterUserId != null) {
|
||||
StringBuilder pf = new StringBuilder("AND ALL(n IN nodes(p) WHERE ");
|
||||
pf.append("(n.type IN ['User', 'Org', 'Field'] OR n.`properties.created_by` = $filterUserId)");
|
||||
if (excludeConfidential) {
|
||||
pf.append(" AND (toUpper(trim(n.`properties.sensitivity`)) IS NULL OR toUpper(trim(n.`properties.sensitivity`)) <> 'CONFIDENTIAL')");
|
||||
}
|
||||
pf.append(") ");
|
||||
permFilter = pf.toString();
|
||||
}
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("graphId", graphId);
|
||||
params.put("entityId", entityId);
|
||||
params.put("limit", clampedLimit);
|
||||
if (filterUserId != null) {
|
||||
params.put("filterUserId", filterUserId);
|
||||
}
|
||||
|
||||
// 查询邻居节点(路径变量约束中间节点与关系均属于同一图谱,权限过滤覆盖路径全节点)
|
||||
List<EntitySummaryVO> nodes = neo4jClient
|
||||
.query(
|
||||
"MATCH p = (e:Entity {graph_id: $graphId, id: $entityId})" +
|
||||
"-[:" + REL_TYPE + "*1.." + clampedDepth + "]-(neighbor:Entity) " +
|
||||
"WHERE e <> neighbor " +
|
||||
" AND ALL(n IN nodes(p) WHERE n.graph_id = $graphId) " +
|
||||
" AND ALL(r IN relationships(p) WHERE r.graph_id = $graphId) " +
|
||||
permFilter +
|
||||
"WITH DISTINCT neighbor LIMIT $limit " +
|
||||
"RETURN neighbor.id AS id, neighbor.name AS name, neighbor.type AS type, " +
|
||||
"neighbor.description AS description"
|
||||
)
|
||||
.bindAll(params)
|
||||
.fetchAs(EntitySummaryVO.class)
|
||||
.mappedBy((ts, record) -> EntitySummaryVO.builder()
|
||||
.id(record.get("id").asString(null))
|
||||
.name(record.get("name").asString(null))
|
||||
.type(record.get("type").asString(null))
|
||||
.description(record.get("description").asString(null))
|
||||
.build())
|
||||
.all()
|
||||
.stream().toList();
|
||||
|
||||
// 收集所有节点 ID(包括起始节点)
|
||||
Set<String> nodeIds = new LinkedHashSet<>();
|
||||
nodeIds.add(entityId);
|
||||
nodes.forEach(n -> nodeIds.add(n.getId()));
|
||||
|
||||
// 查询这些节点之间的边
|
||||
List<EdgeSummaryVO> edges = queryEdgesBetween(graphId, new ArrayList<>(nodeIds));
|
||||
|
||||
// 将起始节点加入节点列表
|
||||
List<EntitySummaryVO> allNodes = new ArrayList<>();
|
||||
allNodes.add(EntitySummaryVO.builder()
|
||||
.id(startEntity.getId())
|
||||
.name(startEntity.getName())
|
||||
.type(startEntity.getType())
|
||||
.description(startEntity.getDescription())
|
||||
.build());
|
||||
allNodes.addAll(nodes);
|
||||
|
||||
return SubgraphVO.builder()
|
||||
.nodes(allNodes)
|
||||
.edges(edges)
|
||||
.nodeCount(allNodes.size())
|
||||
.edgeCount(edges.size())
|
||||
.build();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 最短路径
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Finds the shortest path between two entities in the same graph.
 *
 * @param graphId  graph identifier (validated as a UUID)
 * @param sourceId start entity ID
 * @param targetId end entity ID
 * @param maxDepth maximum search depth (clamped by the configured upper bound)
 * @return the path result; a single-node path when source == target,
 *         or an empty path with pathLength = -1 when no path exists
 */
public PathVO getShortestPath(String graphId, String sourceId, String targetId, int maxDepth) {
    validateGraphId(graphId);
    String filterUserId = resolveOwnerFilter();
    boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

    // Verify that both endpoints exist and that the caller may access them
    GraphEntity sourceEntity = entityRepository.findByIdAndGraphId(sourceId, graphId)
            .orElseThrow(() -> BusinessException.of(
                    KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "源实体不存在"));

    if (filterUserId != null) {
        assertEntityAccess(sourceEntity, filterUserId, excludeConfidential);
    }

    entityRepository.findByIdAndGraphId(targetId, graphId)
            .ifPresentOrElse(
                    targetEntity -> {
                        // Skip the duplicate check when source and target are the same entity
                        if (filterUserId != null && !sourceId.equals(targetId)) {
                            assertEntityAccess(targetEntity, filterUserId, excludeConfidential);
                        }
                    },
                    () -> { throw BusinessException.of(
                            KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "目标实体不存在"); }
            );

    if (sourceId.equals(targetId)) {
        // Source and target are identical: return a single-node path of length 0
        EntitySummaryVO node = EntitySummaryVO.builder()
                .id(sourceEntity.getId())
                .name(sourceEntity.getName())
                .type(sourceEntity.getType())
                .description(sourceEntity.getDescription())
                .build();
        return PathVO.builder()
                .nodes(List.of(node))
                .edges(List.of())
                .pathLength(0)
                .build();
    }

    // Depth is clamped to [1, configured max]; it is an int, so concatenating it
    // into the Cypher pattern below cannot inject arbitrary Cypher.
    int clampedDepth = Math.max(1, Math.min(maxDepth, properties.getMaxDepth()));

    // Permission predicate applied to every node on the path (non-admins only)
    String permFilter = "";
    if (filterUserId != null) {
        StringBuilder pf = new StringBuilder("AND ALL(n IN nodes(path) WHERE ");
        pf.append("(n.type IN ['User', 'Org', 'Field'] OR n.`properties.created_by` = $filterUserId)");
        if (excludeConfidential) {
            pf.append(" AND (toUpper(trim(n.`properties.sensitivity`)) IS NULL OR toUpper(trim(n.`properties.sensitivity`)) <> 'CONFIDENTIAL')");
        }
        pf.append(") ");
        permFilter = pf.toString();
    }

    Map<String, Object> params = new HashMap<>();
    params.put("graphId", graphId);
    params.put("sourceId", sourceId);
    params.put("targetId", targetId);
    if (filterUserId != null) {
        params.put("filterUserId", filterUserId);
    }

    // Uses Neo4j's shortestPath() function
    String cypher =
            "MATCH (s:Entity {graph_id: $graphId, id: $sourceId}), " +
            " (t:Entity {graph_id: $graphId, id: $targetId}), " +
            " path = shortestPath((s)-[:" + REL_TYPE + "*1.." + clampedDepth + "]-(t)) " +
            "WHERE ALL(n IN nodes(path) WHERE n.graph_id = $graphId) " +
            " AND ALL(r IN relationships(path) WHERE r.graph_id = $graphId) " +
            permFilter +
            "RETURN " +
            " [n IN nodes(path) | {id: n.id, name: n.name, type: n.type, description: n.description}] AS pathNodes, " +
            " [r IN relationships(path) | {id: r.id, relation_type: r.relation_type, weight: r.weight, " +
            " source: startNode(r).id, target: endNode(r).id}] AS pathEdges, " +
            " length(path) AS pathLength";

    return neo4jClient.query(cypher)
            .bindAll(params)
            .fetchAs(PathVO.class)
            .mappedBy((ts, record) -> mapPathRecord(record))
            .one()
            // No row returned means no path satisfies the depth/permission constraints
            .orElse(PathVO.builder()
                    .nodes(List.of())
                    .edges(List.of())
                    .pathLength(-1)
                    .build());
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 所有路径
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Finds all paths between two entities, ordered by ascending path length.
 *
 * @param graphId  graph identifier (validated as a UUID)
 * @param sourceId start entity ID
 * @param targetId end entity ID
 * @param maxDepth maximum search depth (clamped by the configured upper bound)
 * @param maxPaths maximum number of paths to return (clamped by maxNodesPerQuery)
 * @return all matching paths; a single zero-length path when source == target
 */
public AllPathsVO findAllPaths(String graphId, String sourceId, String targetId, int maxDepth, int maxPaths) {
    validateGraphId(graphId);
    String filterUserId = resolveOwnerFilter();
    boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

    // Verify that both endpoints exist and that the caller may access them
    GraphEntity sourceEntity = entityRepository.findByIdAndGraphId(sourceId, graphId)
            .orElseThrow(() -> BusinessException.of(
                    KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "源实体不存在"));

    if (filterUserId != null) {
        assertEntityAccess(sourceEntity, filterUserId, excludeConfidential);
    }

    entityRepository.findByIdAndGraphId(targetId, graphId)
            .ifPresentOrElse(
                    targetEntity -> {
                        // Skip the duplicate check when source and target are the same entity
                        if (filterUserId != null && !sourceId.equals(targetId)) {
                            assertEntityAccess(targetEntity, filterUserId, excludeConfidential);
                        }
                    },
                    () -> { throw BusinessException.of(
                            KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "目标实体不存在"); }
            );

    if (sourceId.equals(targetId)) {
        // Degenerate case: the only "path" is the entity itself
        EntitySummaryVO node = EntitySummaryVO.builder()
                .id(sourceEntity.getId())
                .name(sourceEntity.getName())
                .type(sourceEntity.getType())
                .description(sourceEntity.getDescription())
                .build();
        PathVO singlePath = PathVO.builder()
                .nodes(List.of(node))
                .edges(List.of())
                .pathLength(0)
                .build();
        return AllPathsVO.builder()
                .paths(List.of(singlePath))
                .pathCount(1)
                .build();
    }

    // Both bounds are ints, so interpolating clampedDepth into the pattern is safe
    int clampedDepth = Math.max(1, Math.min(maxDepth, properties.getMaxDepth()));
    int clampedMaxPaths = Math.max(1, Math.min(maxPaths, properties.getMaxNodesPerQuery()));

    // Permission predicate applied to every node on the path (non-admins only)
    String permFilter = "";
    if (filterUserId != null) {
        StringBuilder pf = new StringBuilder("AND ALL(n IN nodes(path) WHERE ");
        pf.append("(n.type IN ['User', 'Org', 'Field'] OR n.`properties.created_by` = $filterUserId)");
        if (excludeConfidential) {
            pf.append(" AND (toUpper(trim(n.`properties.sensitivity`)) IS NULL OR toUpper(trim(n.`properties.sensitivity`)) <> 'CONFIDENTIAL')");
        }
        pf.append(") ");
        permFilter = pf.toString();
    }

    Map<String, Object> params = new HashMap<>();
    params.put("graphId", graphId);
    params.put("sourceId", sourceId);
    params.put("targetId", targetId);
    params.put("maxPaths", clampedMaxPaths);
    if (filterUserId != null) {
        params.put("filterUserId", filterUserId);
    }

    String cypher =
            "MATCH (s:Entity {graph_id: $graphId, id: $sourceId}), " +
            " (t:Entity {graph_id: $graphId, id: $targetId}), " +
            " path = (s)-[:" + REL_TYPE + "*1.." + clampedDepth + "]-(t) " +
            "WHERE ALL(n IN nodes(path) WHERE n.graph_id = $graphId) " +
            " AND ALL(r IN relationships(path) WHERE r.graph_id = $graphId) " +
            permFilter +
            "RETURN " +
            " [n IN nodes(path) | {id: n.id, name: n.name, type: n.type, description: n.description}] AS pathNodes, " +
            " [r IN relationships(path) | {id: r.id, relation_type: r.relation_type, weight: r.weight, " +
            " source: startNode(r).id, target: endNode(r).id}] AS pathEdges, " +
            " length(path) AS pathLength " +
            "ORDER BY length(path) ASC " +
            "LIMIT $maxPaths";

    // Path enumeration can explode combinatorially; run it under a transaction timeout
    List<PathVO> paths = queryWithTimeout(cypher, params, record -> mapPathRecord(record));

    return AllPathsVO.builder()
            .paths(paths)
            .pathCount(paths.size())
            .build();
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 子图提取
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Extracts the relation network (subgraph) induced by a given set of entities.
 * <p>
 * Entities the caller cannot access are silently dropped rather than causing
 * an error, so the returned subgraph only contains visible nodes and the
 * edges among them.
 *
 * @param graphId   graph identifier (validated as a UUID)
 * @param entityIds entity ID collection; null/empty yields an empty subgraph
 */
public SubgraphVO getSubgraph(String graphId, List<String> entityIds) {
    validateGraphId(graphId);
    String filterUserId = resolveOwnerFilter();
    boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

    if (entityIds == null || entityIds.isEmpty()) {
        return SubgraphVO.builder()
                .nodes(List.of())
                .edges(List.of())
                .nodeCount(0)
                .edgeCount(0)
                .build();
    }

    int maxNodes = properties.getMaxNodesPerQuery();
    if (entityIds.size() > maxNodes) {
        throw BusinessException.of(KnowledgeGraphErrorCode.MAX_NODES_EXCEEDED,
                "实体数量超出限制(最大 " + maxNodes + ")");
    }

    // Load only the entities that actually exist in this graph
    List<GraphEntity> entities = entityRepository.findByGraphIdAndIdIn(graphId, entityIds);

    // Permission filter: non-admins only see structural entities and
    // business entities they created themselves
    if (filterUserId != null) {
        entities = entities.stream()
                .filter(e -> isEntityAccessible(e, filterUserId, excludeConfidential))
                .toList();
    }

    List<EntitySummaryVO> nodes = entities.stream()
            .map(e -> EntitySummaryVO.builder()
                    .id(e.getId())
                    .name(e.getName())
                    .type(e.getType())
                    .description(e.getDescription())
                    .build())
            .toList();

    if (nodes.isEmpty()) {
        return SubgraphVO.builder()
                .nodes(List.of())
                .edges(List.of())
                .nodeCount(0)
                .edgeCount(0)
                .build();
    }

    // Fetch the edges connecting the surviving (visible) nodes
    List<String> existingIds = entities.stream().map(GraphEntity::getId).toList();
    List<EdgeSummaryVO> edges = queryEdgesBetween(graphId, existingIds);

    return SubgraphVO.builder()
            .nodes(nodes)
            .edges(edges)
            .nodeCount(nodes.size())
            .edgeCount(edges.size())
            .build();
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 子图导出
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Exports the subgraph induced by a set of seed entities, optionally expanded
 * by N hops of neighbors.
 *
 * @param graphId   graph identifier (validated as a UUID)
 * @param entityIds seed entity IDs; null/empty yields an empty export
 * @param depth     expansion depth (0 = seeds only, 1 = include 1-hop neighbors, ...)
 * @return export result with full node properties and edge attributes
 */
public SubgraphExportVO exportSubgraph(String graphId, List<String> entityIds, int depth) {
    validateGraphId(graphId);
    String filterUserId = resolveOwnerFilter();
    boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

    if (entityIds == null || entityIds.isEmpty()) {
        return SubgraphExportVO.builder()
                .nodes(List.of())
                .edges(List.of())
                .nodeCount(0)
                .edgeCount(0)
                .build();
    }

    int maxNodes = properties.getMaxNodesPerQuery();
    if (entityIds.size() > maxNodes) {
        throw BusinessException.of(KnowledgeGraphErrorCode.MAX_NODES_EXCEEDED,
                "实体数量超出限制(最大 " + maxNodes + ")");
    }

    // Depth 0 is allowed here (seeds only), unlike the path queries
    int clampedDepth = Math.max(0, Math.min(depth, properties.getMaxDepth()));
    List<GraphEntity> entities;

    if (clampedDepth == 0) {
        // Seeds only — no graph traversal needed
        entities = entityRepository.findByGraphIdAndIdIn(graphId, entityIds);
    } else {
        // Expand neighbors: first resolve the expanded node ID set via Cypher,
        // then load the full entities from the repository
        Set<String> expandedIds = expandNeighborIds(graphId, entityIds, clampedDepth,
                filterUserId, excludeConfidential, maxNodes);
        entities = expandedIds.isEmpty()
                ? List.of()
                : entityRepository.findByGraphIdAndIdIn(graphId, new ArrayList<>(expandedIds));
    }

    // Permission filter (also re-applied after expansion, since the repository
    // load bypasses the Cypher-side predicate)
    if (filterUserId != null) {
        entities = entities.stream()
                .filter(e -> isEntityAccessible(e, filterUserId, excludeConfidential))
                .toList();
    }

    if (entities.isEmpty()) {
        return SubgraphExportVO.builder()
                .nodes(List.of())
                .edges(List.of())
                .nodeCount(0)
                .edgeCount(0)
                .build();
    }

    List<ExportNodeVO> nodes = entities.stream()
            .map(e -> ExportNodeVO.builder()
                    .id(e.getId())
                    .name(e.getName())
                    .type(e.getType())
                    .description(e.getDescription())
                    // Normalize null properties to an empty map for consumers
                    .properties(e.getProperties() != null ? e.getProperties() : Map.of())
                    .build())
            .toList();

    List<String> nodeIds = entities.stream().map(GraphEntity::getId).toList();
    List<ExportEdgeVO> edges = queryExportEdgesBetween(graphId, nodeIds);

    return SubgraphExportVO.builder()
            .nodes(nodes)
            .edges(edges)
            .nodeCount(nodes.size())
            .edgeCount(edges.size())
            .build();
}
|
||||
|
||||
/**
|
||||
* 将子图导出结果转换为 GraphML XML 格式。
|
||||
*/
|
||||
public String convertToGraphML(SubgraphExportVO exportVO) {
|
||||
StringBuilder xml = new StringBuilder();
|
||||
xml.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
|
||||
xml.append("<graphml xmlns=\"http://graphml.graphstruct.org/graphml\"\n");
|
||||
xml.append(" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n");
|
||||
xml.append(" xsi:schemaLocation=\"http://graphml.graphstruct.org/graphml ");
|
||||
xml.append("http://graphml.graphstruct.org/xmlns/1.0/graphml.xsd\">\n");
|
||||
|
||||
// Key 定义
|
||||
xml.append(" <key id=\"name\" for=\"node\" attr.name=\"name\" attr.type=\"string\"/>\n");
|
||||
xml.append(" <key id=\"type\" for=\"node\" attr.name=\"type\" attr.type=\"string\"/>\n");
|
||||
xml.append(" <key id=\"description\" for=\"node\" attr.name=\"description\" attr.type=\"string\"/>\n");
|
||||
xml.append(" <key id=\"relationType\" for=\"edge\" attr.name=\"relationType\" attr.type=\"string\"/>\n");
|
||||
xml.append(" <key id=\"weight\" for=\"edge\" attr.name=\"weight\" attr.type=\"double\"/>\n");
|
||||
|
||||
xml.append(" <graph id=\"G\" edgedefault=\"directed\">\n");
|
||||
|
||||
// 节点
|
||||
if (exportVO.getNodes() != null) {
|
||||
for (ExportNodeVO node : exportVO.getNodes()) {
|
||||
xml.append(" <node id=\"").append(escapeXml(node.getId())).append("\">\n");
|
||||
appendGraphMLData(xml, "name", node.getName());
|
||||
appendGraphMLData(xml, "type", node.getType());
|
||||
appendGraphMLData(xml, "description", node.getDescription());
|
||||
xml.append(" </node>\n");
|
||||
}
|
||||
}
|
||||
|
||||
// 边
|
||||
if (exportVO.getEdges() != null) {
|
||||
for (ExportEdgeVO edge : exportVO.getEdges()) {
|
||||
xml.append(" <edge id=\"").append(escapeXml(edge.getId()))
|
||||
.append("\" source=\"").append(escapeXml(edge.getSourceEntityId()))
|
||||
.append("\" target=\"").append(escapeXml(edge.getTargetEntityId()))
|
||||
.append("\">\n");
|
||||
appendGraphMLData(xml, "relationType", edge.getRelationType());
|
||||
if (edge.getWeight() != null) {
|
||||
appendGraphMLData(xml, "weight", String.valueOf(edge.getWeight()));
|
||||
}
|
||||
xml.append(" </edge>\n");
|
||||
}
|
||||
}
|
||||
|
||||
xml.append(" </graph>\n");
|
||||
xml.append("</graphml>\n");
|
||||
return xml.toString();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 全文搜索
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Full-text entity search (name + description) backed by Neo4j's full-text index.
 * <p>
 * Uses the {@code entity_fulltext} index created by GraphInitializer and
 * returns hits ordered by relevance score. Note that Lucene special characters
 * in {@code query} are escaped before execution, so raw Lucene query syntax is
 * deliberately NOT interpreted (injection protection).
 * <p>
 * Results are cached per (graph, query, page, size, caller identity) via the
 * knowledge-graph cache manager.
 *
 * @param query search keywords (treated as literal text after escaping)
 */
@Cacheable(value = RedisCacheConfig.CACHE_SEARCH,
        key = "T(com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService).cacheKey(#graphId, #query, #page, #size, @resourceAccessService.resolveOwnerFilterUserId(), @resourceAccessService.canViewConfidential())",
        cacheManager = "knowledgeGraphCacheManager")
public PagedResponse<SearchHitVO> fulltextSearch(String graphId, String query, int page, int size) {
    validateGraphId(graphId);
    String filterUserId = resolveOwnerFilter();
    boolean excludeConfidential = filterUserId != null && !resourceAccessService.canViewConfidential();

    // Blank query: short-circuit with an empty page instead of hitting the index
    if (query == null || query.isBlank()) {
        return PagedResponse.of(List.of(), 0, 0, 0);
    }

    int safePage = Math.max(0, page);
    int safeSize = Math.max(1, Math.min(size, 200));
    long skip = (long) safePage * safeSize;
    // Deep pagination degrades Neo4j SKIP performance; reject extreme offsets
    if (skip > MAX_SKIP) {
        throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
    }

    // Escape Lucene special characters to prevent query injection
    String safeQuery = escapeLuceneQuery(query);
    String permFilter = buildPermissionPredicate("node", filterUserId, excludeConfidential);

    Map<String, Object> searchParams = new HashMap<>();
    searchParams.put("graphId", graphId);
    searchParams.put("query", safeQuery);
    searchParams.put("skip", skip);
    searchParams.put("size", safeSize);
    if (filterUserId != null) {
        searchParams.put("filterUserId", filterUserId);
    }

    List<SearchHitVO> results = neo4jClient
            .query(
                "CALL db.index.fulltext.queryNodes('entity_fulltext', $query) YIELD node, score " +
                "WHERE node.graph_id = $graphId " +
                permFilter +
                "RETURN node.id AS id, node.name AS name, node.type AS type, " +
                "node.description AS description, score " +
                "ORDER BY score DESC " +
                "SKIP $skip LIMIT $size"
            )
            .bindAll(searchParams)
            .fetchAs(SearchHitVO.class)
            .mappedBy((ts, record) -> SearchHitVO.builder()
                    .id(record.get("id").asString(null))
                    .name(record.get("name").asString(null))
                    .type(record.get("type").asString(null))
                    .description(record.get("description").asString(null))
                    .score(record.get("score").asDouble())
                    .build())
            .all()
            .stream().toList();

    // Second query for the total hit count (same predicate, no pagination)
    Map<String, Object> countParams = new HashMap<>();
    countParams.put("graphId", graphId);
    countParams.put("query", safeQuery);
    if (filterUserId != null) {
        countParams.put("filterUserId", filterUserId);
    }

    long total = neo4jClient
            .query(
                "CALL db.index.fulltext.queryNodes('entity_fulltext', $query) YIELD node, score " +
                "WHERE node.graph_id = $graphId " +
                permFilter +
                "RETURN count(*) AS total"
            )
            .bindAll(countParams)
            .fetchAs(Long.class)
            .mappedBy((ts, record) -> record.get("total").asLong())
            .one()
            .orElse(0L);

    // Ceiling division for the page count
    long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
    return PagedResponse.of(results, safePage, total, totalPages);
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 权限过滤
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Resolves the owner-filter user ID for the current caller.
 * <p>
 * Returns {@code null} for administrators (no filtering applied) and the
 * current user's ID for regular users.
 */
private String resolveOwnerFilter() {
    return resourceAccessService.resolveOwnerFilterUserId();
}
|
||||
|
||||
/**
|
||||
* 构建 Cypher 权限过滤条件片段。
|
||||
* <p>
|
||||
* 管理员返回空字符串(不过滤);
|
||||
* 普通用户返回 AND 子句:仅保留结构型实体(User、Org、Field)
|
||||
* 和 {@code created_by} 等于当前用户的业务实体。
|
||||
* 若无保密数据权限,额外过滤 sensitivity=CONFIDENTIAL。
|
||||
*/
|
||||
private static String buildPermissionPredicate(String nodeAlias, String filterUserId, boolean excludeConfidential) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
if (filterUserId != null) {
|
||||
sb.append("AND (").append(nodeAlias).append(".type IN ['User', 'Org', 'Field'] OR ")
|
||||
.append(nodeAlias).append(".`properties.created_by` = $filterUserId) ");
|
||||
}
|
||||
if (excludeConfidential) {
|
||||
sb.append("AND (toUpper(trim(").append(nodeAlias).append(".`properties.sensitivity`)) IS NULL OR ")
|
||||
.append("toUpper(trim(").append(nodeAlias).append(".`properties.sensitivity`)) <> 'CONFIDENTIAL') ");
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* 校验非管理员用户对实体的访问权限。
|
||||
* 保密数据需要 canViewConfidential 权限;
|
||||
* 结构型实体(User、Org、Field)对所有用户可见;
|
||||
* 业务实体必须匹配 created_by。
|
||||
*/
|
||||
private static void assertEntityAccess(GraphEntity entity, String filterUserId, boolean excludeConfidential) {
|
||||
// 保密数据检查(大小写不敏感,与 data-management 一致)
|
||||
if (excludeConfidential) {
|
||||
Object sensitivity = entity.getProperties() != null
|
||||
? entity.getProperties().get("sensitivity") : null;
|
||||
if (sensitivity != null && sensitivity.toString().trim().equalsIgnoreCase("CONFIDENTIAL")) {
|
||||
throw BusinessException.of(SystemErrorCode.INSUFFICIENT_PERMISSIONS, "无权访问保密数据");
|
||||
}
|
||||
}
|
||||
// 结构型实体按类型白名单放行
|
||||
if (STRUCTURAL_ENTITY_TYPES.contains(entity.getType())) {
|
||||
return;
|
||||
}
|
||||
// 业务实体必须匹配 owner
|
||||
Object createdBy = entity.getProperties() != null
|
||||
? entity.getProperties().get("created_by") : null;
|
||||
if (createdBy == null || !filterUserId.equals(createdBy.toString())) {
|
||||
throw BusinessException.of(SystemErrorCode.INSUFFICIENT_PERMISSIONS, "无权访问该实体");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 判断实体是否对指定用户可访问。
|
||||
* 保密数据需要 canViewConfidential 权限;
|
||||
* 结构型实体(User、Org、Field)对所有用户可见;
|
||||
* 业务实体必须匹配 created_by。
|
||||
*/
|
||||
private static boolean isEntityAccessible(GraphEntity entity, String filterUserId, boolean excludeConfidential) {
|
||||
// 保密数据检查(大小写不敏感,与 data-management 一致)
|
||||
if (excludeConfidential) {
|
||||
Object sensitivity = entity.getProperties() != null
|
||||
? entity.getProperties().get("sensitivity") : null;
|
||||
if (sensitivity != null && sensitivity.toString().trim().equalsIgnoreCase("CONFIDENTIAL")) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// 结构型实体按类型白名单放行
|
||||
if (STRUCTURAL_ENTITY_TYPES.contains(entity.getType())) {
|
||||
return true;
|
||||
}
|
||||
// 业务实体必须匹配 owner
|
||||
Object createdBy = entity.getProperties() != null
|
||||
? entity.getProperties().get("created_by") : null;
|
||||
return createdBy != null && filterUserId.equals(createdBy.toString());
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 内部方法
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Fetches all relationships whose source AND target both lie inside the given
 * node-ID set (summary fields only; see queryExportEdgesBetween for the
 * full-attribute variant).
 *
 * @param nodeIds candidate node IDs; fewer than two nodes cannot form an edge
 */
private List<EdgeSummaryVO> queryEdgesBetween(String graphId, List<String> nodeIds) {
    // An edge needs two distinct endpoints, so skip the query entirely
    if (nodeIds.size() < 2) {
        return List.of();
    }

    return neo4jClient
            .query(
                "MATCH (s:Entity {graph_id: $graphId})-[r:" + REL_TYPE + " {graph_id: $graphId}]->(t:Entity {graph_id: $graphId}) " +
                "WHERE s.id IN $nodeIds AND t.id IN $nodeIds " +
                "RETURN r.id AS id, s.id AS sourceEntityId, t.id AS targetEntityId, " +
                "r.relation_type AS relationType, r.weight AS weight"
            )
            .bindAll(Map.of("graphId", graphId, "nodeIds", nodeIds))
            .fetchAs(EdgeSummaryVO.class)
            .mappedBy((ts, record) -> EdgeSummaryVO.builder()
                    .id(record.get("id").asString(null))
                    .sourceEntityId(record.get("sourceEntityId").asString(null))
                    .targetEntityId(record.get("targetEntityId").asString(null))
                    .relationType(record.get("relationType").asString(null))
                    // weight is optional on the relationship; preserve null
                    .weight(record.get("weight").isNull() ? null : record.get("weight").asDouble())
                    .build())
            .all()
            .stream().toList();
}
|
||||
|
||||
/**
 * Maps a Cypher record with {@code pathNodes}, {@code pathEdges} and
 * {@code pathLength} columns (as produced by the path queries in this class)
 * into a {@link PathVO}.
 */
private PathVO mapPathRecord(MapAccessor record) {
    // Parse path nodes (list of {id, name, type, description} maps)
    List<EntitySummaryVO> nodes = new ArrayList<>();
    Value pathNodes = record.get("pathNodes");
    if (pathNodes != null && !pathNodes.isNull()) {
        for (Value nodeVal : pathNodes.asList(v -> v)) {
            nodes.add(EntitySummaryVO.builder()
                    .id(getStringOrNull(nodeVal, "id"))
                    .name(getStringOrNull(nodeVal, "name"))
                    .type(getStringOrNull(nodeVal, "type"))
                    .description(getStringOrNull(nodeVal, "description"))
                    .build());
        }
    }

    // Parse path edges (list of {id, relation_type, weight, source, target} maps)
    List<EdgeSummaryVO> edges = new ArrayList<>();
    Value pathEdges = record.get("pathEdges");
    if (pathEdges != null && !pathEdges.isNull()) {
        for (Value edgeVal : pathEdges.asList(v -> v)) {
            edges.add(EdgeSummaryVO.builder()
                    .id(getStringOrNull(edgeVal, "id"))
                    .sourceEntityId(getStringOrNull(edgeVal, "source"))
                    .targetEntityId(getStringOrNull(edgeVal, "target"))
                    .relationType(getStringOrNull(edgeVal, "relation_type"))
                    .weight(getDoubleOrNull(edgeVal, "weight"))
                    .build());
        }
    }

    // Missing pathLength defaults to 0
    int pathLength = record.get("pathLength").asInt(0);

    return PathVO.builder()
            .nodes(nodes)
            .edges(edges)
            .pathLength(pathLength)
            .build();
}
|
||||
|
||||
/**
|
||||
* 转义 Lucene 查询中的特殊字符,防止查询注入。
|
||||
*/
|
||||
private static String escapeLuceneQuery(String query) {
|
||||
// Lucene 特殊字符: + - && || ! ( ) { } [ ] ^ " ~ * ? : \ /
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (char c : query.toCharArray()) {
|
||||
if ("+-&|!(){}[]^\"~*?:\\/".indexOf(c) >= 0) {
|
||||
sb.append('\\');
|
||||
}
|
||||
sb.append(c);
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private static String getStringOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asString();
|
||||
}
|
||||
|
||||
private static Double getDoubleOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asDouble();
|
||||
}
|
||||
|
||||
/**
 * Fetches all relationships whose source AND target both lie inside the given
 * node-ID set, including the full attribute set needed for export
 * (confidence and source_id on top of the summary fields).
 *
 * @param nodeIds candidate node IDs; fewer than two nodes cannot form an edge
 */
private List<ExportEdgeVO> queryExportEdgesBetween(String graphId, List<String> nodeIds) {
    // An edge needs two distinct endpoints, so skip the query entirely
    if (nodeIds.size() < 2) {
        return List.of();
    }

    return neo4jClient
            .query(
                "MATCH (s:Entity {graph_id: $graphId})-[r:" + REL_TYPE + " {graph_id: $graphId}]->(t:Entity {graph_id: $graphId}) " +
                "WHERE s.id IN $nodeIds AND t.id IN $nodeIds " +
                "RETURN r.id AS id, s.id AS sourceEntityId, t.id AS targetEntityId, " +
                "r.relation_type AS relationType, r.weight AS weight, " +
                "r.confidence AS confidence, r.source_id AS sourceId"
            )
            .bindAll(Map.of("graphId", graphId, "nodeIds", nodeIds))
            .fetchAs(ExportEdgeVO.class)
            .mappedBy((ts, record) -> ExportEdgeVO.builder()
                    .id(record.get("id").asString(null))
                    .sourceEntityId(record.get("sourceEntityId").asString(null))
                    .targetEntityId(record.get("targetEntityId").asString(null))
                    .relationType(record.get("relationType").asString(null))
                    // weight and confidence are optional on the relationship; preserve null
                    .weight(record.get("weight").isNull() ? null : record.get("weight").asDouble())
                    .confidence(record.get("confidence").isNull() ? null : record.get("confidence").asDouble())
                    .sourceId(record.get("sourceId").asString(null))
                    .build())
            .all()
            .stream().toList();
}
|
||||
|
||||
/**
 * Expands N-hop neighbors from a set of seed entities, returning all node IDs
 * (seeds included).
 * <p>
 * Runs under a transaction timeout to guard against combinatorial explosion
 * at higher depths. The total result size, seeds included, never exceeds
 * {@code maxNodes} (the LIMIT in the Cypher covers both).
 *
 * @param depth expansion depth, assumed already clamped to >= 1 by the caller
 */
private Set<String> expandNeighborIds(String graphId, List<String> seedIds, int depth,
                                      String filterUserId, boolean excludeConfidential, int maxNodes) {
    // Permission predicate applied to every node on the expansion paths
    String permFilter = "";
    if (filterUserId != null) {
        StringBuilder pf = new StringBuilder("AND ALL(n IN nodes(p) WHERE ");
        pf.append("(n.type IN ['User', 'Org', 'Field'] OR n.`properties.created_by` = $filterUserId)");
        if (excludeConfidential) {
            pf.append(" AND (toUpper(trim(n.`properties.sensitivity`)) IS NULL OR toUpper(trim(n.`properties.sensitivity`)) <> 'CONFIDENTIAL')");
        }
        pf.append(") ");
        permFilter = pf.toString();
    }

    Map<String, Object> params = new HashMap<>();
    params.put("graphId", graphId);
    params.put("seedIds", seedIds);
    params.put("maxNodes", maxNodes);
    if (filterUserId != null) {
        params.put("filterUserId", filterUserId);
    }

    // Seeds are folded into the same LIMIT so the total stays within maxNodes.
    // OPTIONAL MATCH keeps isolated seeds (no qualifying neighbors) in the result;
    // depth is an int, so its interpolation into the pattern is injection-safe.
    String cypher =
            "MATCH (seed:Entity {graph_id: $graphId}) " +
            "WHERE seed.id IN $seedIds " +
            "WITH collect(DISTINCT seed) AS seeds " +
            "UNWIND seeds AS s " +
            "OPTIONAL MATCH p = (s)-[:" + REL_TYPE + "*1.." + depth + "]-(neighbor:Entity) " +
            "WHERE ALL(n IN nodes(p) WHERE n.graph_id = $graphId) " +
            " AND ALL(r IN relationships(p) WHERE r.graph_id = $graphId) " +
            permFilter +
            "WITH seeds + collect(DISTINCT neighbor) AS allNodes " +
            "UNWIND allNodes AS node " +
            "WITH DISTINCT node " +
            "WHERE node IS NOT NULL " +
            "RETURN node.id AS id " +
            "LIMIT $maxNodes";

    List<String> ids = queryWithTimeout(cypher, params,
            record -> record.get("id").asString(null));

    // LinkedHashSet preserves the seed-first ordering produced by the query
    return new LinkedHashSet<>(ids);
}
|
||||
|
||||
private static void appendGraphMLData(StringBuilder xml, String key, String value) {
|
||||
if (value != null) {
|
||||
xml.append(" <data key=\"").append(key).append("\">")
|
||||
.append(escapeXml(value))
|
||||
.append("</data>\n");
|
||||
}
|
||||
}
|
||||
|
||||
private static String escapeXml(String text) {
|
||||
if (text == null) {
|
||||
return "";
|
||||
}
|
||||
return text.replace("&", "&")
|
||||
.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace("\"", """)
|
||||
.replace("'", "'");
|
||||
}
|
||||
|
||||
/**
 * Validates that {@code graphId} is a canonical UUID string.
 * <p>
 * Besides basic input validation, this is the injection guard for the methods
 * in this class that concatenate identifiers into Cypher text.
 *
 * @throws BusinessException with INVALID_PARAMETER when the ID is malformed
 */
private void validateGraphId(String graphId) {
    if (graphId == null || !UUID_PATTERN.matcher(graphId).matches()) {
        throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "graphId 格式无效");
    }
}
|
||||
|
||||
/**
 * Executes a query directly through the Neo4j Driver with a transaction-level
 * timeout.
 * <p>
 * Used for high-cost queries (path enumeration, neighbor expansion) that can
 * suffer combinatorial explosion; on timeout the Neo4j server terminates the
 * transaction, preventing resource exhaustion. A detected timeout is
 * translated into a QUERY_TIMEOUT business error; any other failure is
 * rethrown unchanged.
 */
private <T> List<T> queryWithTimeout(String cypher, Map<String, Object> params,
                                     Function<Record, T> mapper) {
    int timeoutSeconds = properties.getQueryTimeoutSeconds();
    TransactionConfig txConfig = TransactionConfig.builder()
            .withTimeout(Duration.ofSeconds(timeoutSeconds))
            .build();
    try (Session session = neo4jDriver.session()) {
        return session.executeRead(tx -> {
            var result = tx.run(cypher, params);
            List<T> items = new ArrayList<>();
            while (result.hasNext()) {
                items.add(mapper.apply(result.next()));
            }
            return items;
        }, txConfig);
    } catch (Exception e) {
        if (isTransactionTimeout(e)) {
            // Log only a prefix of the Cypher to keep the log line bounded
            log.warn("图查询超时({}秒): {}", timeoutSeconds, cypher.substring(0, Math.min(cypher.length(), 120)));
            throw BusinessException.of(KnowledgeGraphErrorCode.QUERY_TIMEOUT,
                    "查询超时(" + timeoutSeconds + "秒),请缩小搜索范围或减少深度");
        }
        throw e;
    }
}
|
||||
|
||||
/**
|
||||
* 判断异常是否为 Neo4j 事务超时。
|
||||
*/
|
||||
private static boolean isTransactionTimeout(Exception e) {
|
||||
// Neo4j 事务超时时抛出的异常链中通常包含 "terminated" 或 "timeout"
|
||||
Throwable current = e;
|
||||
while (current != null) {
|
||||
String msg = current.getMessage();
|
||||
if (msg != null) {
|
||||
String lower = msg.toLowerCase(Locale.ROOT);
|
||||
if (lower.contains("transaction has been terminated") || lower.contains("timed out")) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
current = current.getCause();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,251 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.common.interfaces.PagedResponse;
|
||||
import com.datamate.knowledgegraph.domain.model.RelationDetail;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphRelationRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.CreateRelationRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.RelationVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.UpdateRelationRequest;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* 知识图谱关系业务服务。
|
||||
* <p>
|
||||
* <b>信任边界说明</b>:本服务仅通过内网被 API Gateway / Java 后端调用,
|
||||
* 网关层已完成用户身份认证与权限校验,服务层不再重复鉴权,
|
||||
* 仅校验 graphId 格式(防 Cypher 注入)与数据完整性约束。
|
||||
*/
|
||||
@Service
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class GraphRelationService {
|
||||
|
||||
/** 分页偏移量上限,防止深翻页导致 Neo4j 性能退化。 */
|
||||
private static final long MAX_SKIP = 100_000L;
|
||||
|
||||
/** 合法的关系查询方向。 */
|
||||
private static final Set<String> VALID_DIRECTIONS = Set.of("all", "in", "out");
|
||||
|
||||
private static final Pattern UUID_PATTERN = Pattern.compile(
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
|
||||
);
|
||||
|
||||
private final GraphRelationRepository relationRepository;
|
||||
private final GraphEntityRepository entityRepository;
|
||||
private final GraphCacheService cacheService;
|
||||
|
||||
@Transactional
|
||||
public RelationVO createRelation(String graphId, CreateRelationRequest request) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
// 校验源实体存在
|
||||
entityRepository.findByIdAndGraphId(request.getSourceEntityId(), graphId)
|
||||
.orElseThrow(() -> BusinessException.of(
|
||||
KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "源实体不存在"));
|
||||
|
||||
// 校验目标实体存在
|
||||
entityRepository.findByIdAndGraphId(request.getTargetEntityId(), graphId)
|
||||
.orElseThrow(() -> BusinessException.of(
|
||||
KnowledgeGraphErrorCode.ENTITY_NOT_FOUND, "目标实体不存在"));
|
||||
|
||||
RelationDetail detail = relationRepository.create(
|
||||
graphId,
|
||||
request.getSourceEntityId(),
|
||||
request.getTargetEntityId(),
|
||||
request.getRelationType(),
|
||||
request.getProperties(),
|
||||
request.getWeight(),
|
||||
request.getSourceId(),
|
||||
request.getConfidence()
|
||||
).orElseThrow(() -> BusinessException.of(
|
||||
KnowledgeGraphErrorCode.INVALID_RELATION, "关系创建失败"));
|
||||
|
||||
log.info("Relation created: id={}, graphId={}, type={}, source={} -> target={}",
|
||||
detail.getId(), graphId, request.getRelationType(),
|
||||
request.getSourceEntityId(), request.getTargetEntityId());
|
||||
cacheService.evictEntityCaches(graphId, request.getSourceEntityId());
|
||||
return toVO(detail);
|
||||
}
|
||||
|
||||
public RelationVO getRelation(String graphId, String relationId) {
|
||||
validateGraphId(graphId);
|
||||
RelationDetail detail = relationRepository.findByIdAndGraphId(relationId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND));
|
||||
return toVO(detail);
|
||||
}
|
||||
|
||||
public PagedResponse<RelationVO> listRelations(String graphId, String type, int page, int size) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
int safePage = Math.max(0, page);
|
||||
int safeSize = Math.max(1, Math.min(size, 200));
|
||||
long skip = (long) safePage * safeSize;
|
||||
if (skip > MAX_SKIP) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
|
||||
}
|
||||
|
||||
List<RelationDetail> details = relationRepository.findByGraphId(graphId, type, skip, safeSize);
|
||||
long total = relationRepository.countByGraphId(graphId, type);
|
||||
long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
|
||||
|
||||
List<RelationVO> content = details.stream().map(GraphRelationService::toVO).toList();
|
||||
return PagedResponse.of(content, safePage, total, totalPages);
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询实体的关系列表。
|
||||
*
|
||||
* @param direction "all"、"in" 或 "out"
|
||||
*/
|
||||
public PagedResponse<RelationVO> listEntityRelations(String graphId, String entityId,
|
||||
String direction, String type,
|
||||
int page, int size) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
// 校验实体存在
|
||||
entityRepository.findByIdAndGraphId(entityId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.ENTITY_NOT_FOUND));
|
||||
|
||||
int safePage = Math.max(0, page);
|
||||
int safeSize = Math.max(1, Math.min(size, 200));
|
||||
long skip = (long) safePage * safeSize;
|
||||
if (skip > MAX_SKIP) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "分页偏移量过大");
|
||||
}
|
||||
|
||||
String safeDirection = (direction != null) ? direction : "all";
|
||||
if (!VALID_DIRECTIONS.contains(safeDirection)) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER,
|
||||
"direction 参数无效,允许值:all, in, out");
|
||||
}
|
||||
|
||||
List<RelationDetail> details;
|
||||
switch (safeDirection) {
|
||||
case "in":
|
||||
details = relationRepository.findInboundByEntityId(graphId, entityId, type, skip, safeSize);
|
||||
break;
|
||||
case "out":
|
||||
details = relationRepository.findOutboundByEntityId(graphId, entityId, type, skip, safeSize);
|
||||
break;
|
||||
default:
|
||||
details = relationRepository.findByEntityId(graphId, entityId, type, skip, safeSize);
|
||||
break;
|
||||
}
|
||||
|
||||
long total = relationRepository.countByEntityId(graphId, entityId, type, safeDirection);
|
||||
long totalPages = safeSize > 0 ? (total + safeSize - 1) / safeSize : 0;
|
||||
|
||||
List<RelationVO> content = details.stream().map(GraphRelationService::toVO).toList();
|
||||
return PagedResponse.of(content, safePage, total, totalPages);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public RelationVO updateRelation(String graphId, String relationId, UpdateRelationRequest request) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
// 确认关系存在
|
||||
relationRepository.findByIdAndGraphId(relationId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND));
|
||||
|
||||
RelationDetail detail = relationRepository.update(
|
||||
relationId, graphId,
|
||||
request.getRelationType(),
|
||||
request.getProperties(),
|
||||
request.getWeight(),
|
||||
request.getConfidence()
|
||||
).orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND));
|
||||
|
||||
log.info("Relation updated: id={}, graphId={}", relationId, graphId);
|
||||
cacheService.evictEntityCaches(graphId, detail.getSourceEntityId());
|
||||
return toVO(detail);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void deleteRelation(String graphId, String relationId) {
|
||||
validateGraphId(graphId);
|
||||
|
||||
// 确认关系存在并保留关系两端实体 ID,用于精准缓存失效
|
||||
RelationDetail detail = relationRepository.findByIdAndGraphId(relationId, graphId)
|
||||
.orElseThrow(() -> BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND));
|
||||
|
||||
long deleted = relationRepository.deleteByIdAndGraphId(relationId, graphId);
|
||||
if (deleted <= 0) {
|
||||
throw BusinessException.of(KnowledgeGraphErrorCode.RELATION_NOT_FOUND);
|
||||
}
|
||||
log.info("Relation deleted: id={}, graphId={}", relationId, graphId);
|
||||
cacheService.evictEntityCaches(graphId, detail.getSourceEntityId());
|
||||
if (detail.getTargetEntityId() != null
|
||||
&& !detail.getTargetEntityId().equals(detail.getSourceEntityId())) {
|
||||
cacheService.evictEntityCaches(graphId, detail.getTargetEntityId());
|
||||
}
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Map<String, Object> batchDeleteRelations(String graphId, List<String> relationIds) {
|
||||
validateGraphId(graphId);
|
||||
int deleted = 0;
|
||||
List<String> failedIds = new ArrayList<>();
|
||||
for (String relationId : relationIds) {
|
||||
try {
|
||||
deleteRelation(graphId, relationId);
|
||||
deleted++;
|
||||
} catch (Exception e) {
|
||||
log.warn("Batch delete: failed to delete relation {}: {}", relationId, e.getMessage());
|
||||
failedIds.add(relationId);
|
||||
}
|
||||
}
|
||||
Map<String, Object> result = Map.of(
|
||||
"deleted", deleted,
|
||||
"total", relationIds.size(),
|
||||
"failedIds", failedIds
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 领域对象 → 视图对象 转换
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
private static RelationVO toVO(RelationDetail detail) {
|
||||
return RelationVO.builder()
|
||||
.id(detail.getId())
|
||||
.sourceEntityId(detail.getSourceEntityId())
|
||||
.sourceEntityName(detail.getSourceEntityName())
|
||||
.sourceEntityType(detail.getSourceEntityType())
|
||||
.targetEntityId(detail.getTargetEntityId())
|
||||
.targetEntityName(detail.getTargetEntityName())
|
||||
.targetEntityType(detail.getTargetEntityType())
|
||||
.relationType(detail.getRelationType())
|
||||
.properties(detail.getProperties())
|
||||
.weight(detail.getWeight())
|
||||
.confidence(detail.getConfidence())
|
||||
.sourceId(detail.getSourceId())
|
||||
.graphId(detail.getGraphId())
|
||||
.createdAt(detail.getCreatedAt())
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* 校验 graphId 格式(UUID)。
|
||||
* 防止恶意构造的 graphId 注入 Cypher 查询。
|
||||
*/
|
||||
private void validateGraphId(String graphId) {
|
||||
if (graphId == null || !UUID_PATTERN.matcher(graphId).matches()) {
|
||||
throw BusinessException.of(SystemErrorCode.INVALID_PARAMETER, "graphId 格式无效");
|
||||
}
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,95 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Index health-check service.
 * <p>
 * Exposes Neo4j index-state queries for operational monitoring and
 * startup verification.
 */
@Service
@Slf4j
@RequiredArgsConstructor
public class IndexHealthService {

    private final Neo4jClient neo4jClient;

    /**
     * Returns status information for every index in the database.
     *
     * @return one map per index with keys: name, state, type, entityType,
     *         labelsOrTypes, properties
     */
    public List<Map<String, Object>> getIndexStatus() {
        return neo4jClient
                .query("SHOW INDEXES YIELD name, state, type, entityType, labelsOrTypes, properties " +
                        "RETURN name, state, type, entityType, labelsOrTypes, properties " +
                        "ORDER BY name")
                .fetchAs(Map.class)
                .mappedBy((ts, record) -> {
                    // LinkedHashMap keeps the column order stable in the output.
                    Map<String, Object> info = new java.util.LinkedHashMap<>();
                    info.put("name", record.get("name").asString(null));
                    info.put("state", record.get("state").asString(null));
                    info.put("type", record.get("type").asString(null));
                    info.put("entityType", record.get("entityType").asString(null));
                    // List-valued columns may be NULL in the driver; normalize to empty lists.
                    var labelsOrTypes = record.get("labelsOrTypes");
                    info.put("labelsOrTypes", labelsOrTypes.isNull() ? List.of() : labelsOrTypes.asList(v -> v.asString(null)));
                    var properties = record.get("properties");
                    info.put("properties", properties.isNull() ? List.of() : properties.asList(v -> v.asString(null)));
                    return info;
                })
                .all()
                .stream()
                // fetchAs(Map.class) yields raw Maps; the cast restores the generic type.
                .map(m -> (Map<String, Object>) m)
                .toList();
    }

    /**
     * Checks whether every index is in the ONLINE state.
     *
     * @return true only when at least one index exists and all are ONLINE;
     *         an empty index list is treated as unhealthy
     */
    public boolean allIndexesOnline() {
        List<Map<String, Object>> indexes = getIndexStatus();
        if (indexes.isEmpty()) {
            log.warn("No indexes found in Neo4j database");
            return false;
        }
        for (Map<String, Object> idx : indexes) {
            String state = (String) idx.get("state");
            if (!"ONLINE".equals(state)) {
                log.warn("Index '{}' is in state '{}' (expected ONLINE)", idx.get("name"), state);
                return false;
            }
        }
        return true;
    }

    /**
     * Returns database-level statistics (Entity node count and RELATED_TO
     * relationship count).
     *
     * @return map with keys "nodeCount" and "relationshipCount"
     */
    public Map<String, Long> getDatabaseStats() {
        Long nodeCount = neo4jClient
                .query("MATCH (n:Entity) RETURN count(n) AS cnt")
                .fetchAs(Long.class)
                .mappedBy((ts, record) -> record.get("cnt").asLong())
                .one()
                .orElse(0L);

        Long relCount = neo4jClient
                .query("MATCH ()-[r:RELATED_TO]->() RETURN count(r) AS cnt")
                .fetchAs(Long.class)
                .mappedBy((ts, record) -> record.get("cnt").asLong())
                .one()
                .orElse(0L);

        return Map.of("nodeCount", nodeCount, "relationshipCount", relCount);
    }
}
|
||||
@@ -0,0 +1,55 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
/**
 * Edit-review record for knowledge-graph modifications.
 * <p>
 * Stored in Neo4j as an {@code EditReview} node; captures create/update/delete
 * requests against entities and relations together with their review state.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EditReview {

    private String id;

    /** ID of the graph this review belongs to */
    private String graphId;

    /** Operation type: CREATE_ENTITY, UPDATE_ENTITY, DELETE_ENTITY, BATCH_DELETE_ENTITY, CREATE_RELATION, UPDATE_RELATION, DELETE_RELATION, BATCH_DELETE_RELATION */
    private String operationType;

    /** Target entity ID (non-null for entity operations) */
    private String entityId;

    /** Target relation ID (non-null for relation operations) */
    private String relationId;

    /** Change payload (JSON-serialized request body) */
    private String payload;

    /** Review state: PENDING, APPROVED, REJECTED */
    @Builder.Default
    private String status = "PENDING";

    /** ID of the submitting user */
    private String submittedBy;

    /** ID of the reviewing user */
    private String reviewedBy;

    /** Reviewer's comment */
    private String reviewComment;

    private LocalDateTime createdAt;

    private LocalDateTime reviewedAt;
}
|
||||
@@ -0,0 +1,81 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import org.springframework.data.neo4j.core.schema.DynamicLabels;
|
||||
import org.springframework.data.neo4j.core.schema.GeneratedValue;
|
||||
import org.springframework.data.neo4j.core.schema.Id;
|
||||
import org.springframework.data.neo4j.core.schema.Node;
|
||||
import org.springframework.data.neo4j.core.schema.Property;
|
||||
import org.springframework.data.neo4j.core.support.UUIDStringGenerator;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Knowledge-graph entity node.
 * <p>
 * Each entity is stored as one Neo4j node; the concrete kind (Person,
 * Organization, Concept, ...) is carried in the {@code type} property, and
 * arbitrary extension attributes live in the {@code properties} map.
 */
@Node("Entity")
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class GraphEntity {

    @Id
    @GeneratedValue(UUIDStringGenerator.class)
    private String id;

    @Property("name")
    private String name;

    @Property("type")
    private String type;

    @Property("description")
    private String description;

    /** Additional Neo4j labels attached to the node beyond the fixed "Entity" label. */
    @DynamicLabels
    @Builder.Default
    private List<String> labels = new ArrayList<>();

    @Property("aliases")
    @Builder.Default
    private List<String> aliases = new ArrayList<>();

    @Property("properties")
    @Builder.Default
    private Map<String, Object> properties = new HashMap<>();

    /** ID of the originating dataset / knowledge base */
    @Property("source_id")
    private String sourceId;

    /** Source type: ANNOTATION, KNOWLEDGE_BASE, IMPORT, MANUAL */
    @Property("source_type")
    private String sourceType;

    /** Owning graph ID (matches t_dm_knowledge_graphs.id in MySQL) */
    @Property("graph_id")
    private String graphId;

    /** Confidence of automatic extraction */
    @Property("confidence")
    @Builder.Default
    private Double confidence = 1.0;

    @Property("created_at")
    private LocalDateTime createdAt;

    @Property("updated_at")
    private LocalDateTime updatedAt;
}
|
||||
@@ -0,0 +1,61 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import org.springframework.data.neo4j.core.schema.GeneratedValue;
|
||||
import org.springframework.data.neo4j.core.schema.Id;
|
||||
import org.springframework.data.neo4j.core.schema.Property;
|
||||
import org.springframework.data.neo4j.core.schema.RelationshipProperties;
|
||||
import org.springframework.data.neo4j.core.schema.TargetNode;
|
||||
import org.springframework.data.neo4j.core.support.UUIDStringGenerator;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Knowledge-graph relation (edge).
 * <p>
 * Modelled with Spring Data Neo4j's {@code @RelationshipProperties} to carry
 * properties on the edge. The concrete relation kind is expressed through
 * {@code relationType} (e.g. belongs_to, located_in).
 */
@RelationshipProperties
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class GraphRelation {

    @Id
    @GeneratedValue(UUIDStringGenerator.class)
    private String id;

    /** The node this edge points to; the source node holds the collection of these edges. */
    @TargetNode
    private GraphEntity target;

    @Property("relation_type")
    private String relationType;

    @Property("properties")
    @Builder.Default
    private Map<String, Object> properties = new HashMap<>();

    @Property("weight")
    @Builder.Default
    private Double weight = 1.0;

    @Property("source_id")
    private String sourceId;

    @Property("confidence")
    @Builder.Default
    private Double confidence = 1.0;

    @Property("graph_id")
    private String graphId;

    @Property("created_at")
    private LocalDateTime createdAt;
}
|
||||
@@ -0,0 +1,54 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * A relation together with summaries of both of its endpoint entities, used
 * as the repository-layer query result.
 * <p>
 * Because {@link GraphRelation} uses {@code @RelationshipProperties} and only
 * holds a reference to the target node, it cannot fully express the
 * "source node + relation + target node" shape returned by Cypher queries;
 * this domain object fills that role.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class RelationDetail {

    private String id;

    private String sourceEntityId;

    private String sourceEntityName;

    private String sourceEntityType;

    private String targetEntityId;

    private String targetEntityName;

    private String targetEntityType;

    private String relationType;

    @Builder.Default
    private Map<String, Object> properties = new HashMap<>();

    private Double weight;

    private Double confidence;

    /** ID of the originating dataset / knowledge base */
    private String sourceId;

    private String graphId;

    private LocalDateTime createdAt;
}
|
||||
@@ -0,0 +1,194 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import org.springframework.data.annotation.Transient;
|
||||
import org.springframework.data.neo4j.core.schema.GeneratedValue;
|
||||
import org.springframework.data.neo4j.core.schema.Id;
|
||||
import org.springframework.data.neo4j.core.schema.Node;
|
||||
import org.springframework.data.neo4j.core.schema.Property;
|
||||
import org.springframework.data.neo4j.core.support.UUIDStringGenerator;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Synchronization metadata recording the overall status and statistics of one
 * sync run.
 * <p>
 * Also persisted to Neo4j as a {@code SyncHistory} node so past runs can be
 * queried and troubleshot.
 */
@Node("SyncHistory")
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SyncMetadata {

    public static final String STATUS_SUCCESS = "SUCCESS";
    public static final String STATUS_FAILED = "FAILED";
    public static final String STATUS_PARTIAL = "PARTIAL";

    public static final String TYPE_FULL = "FULL";
    public static final String TYPE_INCREMENTAL = "INCREMENTAL";
    public static final String TYPE_DATASETS = "DATASETS";
    public static final String TYPE_FIELDS = "FIELDS";
    public static final String TYPE_USERS = "USERS";
    public static final String TYPE_ORGS = "ORGS";
    public static final String TYPE_WORKFLOWS = "WORKFLOWS";
    public static final String TYPE_JOBS = "JOBS";
    public static final String TYPE_LABEL_TASKS = "LABEL_TASKS";
    public static final String TYPE_KNOWLEDGE_SETS = "KNOWLEDGE_SETS";

    @Id
    @GeneratedValue(UUIDStringGenerator.class)
    private String id;

    @Property("sync_id")
    private String syncId;

    @Property("graph_id")
    private String graphId;

    /** Sync type: FULL / DATASETS / WORKFLOWS etc. */
    @Property("sync_type")
    private String syncType;

    /** Sync status: SUCCESS / FAILED / PARTIAL */
    @Property("status")
    private String status;

    @Property("started_at")
    private LocalDateTime startedAt;

    @Property("completed_at")
    private LocalDateTime completedAt;

    @Property("duration_millis")
    private long durationMillis;

    @Property("total_created")
    @Builder.Default
    private int totalCreated = 0;

    @Property("total_updated")
    @Builder.Default
    private int totalUpdated = 0;

    @Property("total_skipped")
    @Builder.Default
    private int totalSkipped = 0;

    @Property("total_failed")
    @Builder.Default
    private int totalFailed = 0;

    @Property("total_purged")
    @Builder.Default
    private int totalPurged = 0;

    /** Start of the incremental-sync time window */
    @Property("updated_from")
    private LocalDateTime updatedFrom;

    /** End of the incremental-sync time window */
    @Property("updated_to")
    private LocalDateTime updatedTo;

    /** Error message when the sync failed */
    @Property("error_message")
    private String errorMessage;

    /** Per-step summaries, e.g. "Dataset(+5/~2/-0/purged:1)" */
    @Property("step_summaries")
    @Builder.Default
    private List<String> stepSummaries = new ArrayList<>();

    /** Detailed per-step results (not persisted to Neo4j; only carried on return) */
    @Transient
    private List<SyncResult> results;

    /** Total of all per-item outcomes: created + updated + skipped + failed (purged excluded). */
    public int totalEntities() {
        return totalCreated + totalUpdated + totalSkipped + totalFailed;
    }

    /**
     * Builds metadata by aggregating a list of per-step {@link SyncResult}s.
     * Status is PARTIAL when any step reported failures, SUCCESS otherwise.
     */
    public static SyncMetadata fromResults(String syncId, String graphId, String syncType,
                                           LocalDateTime startedAt, List<SyncResult> results) {
        LocalDateTime completedAt = LocalDateTime.now();
        long duration = Duration.between(startedAt, completedAt).toMillis();

        int created = 0, updated = 0, skipped = 0, failed = 0, purged = 0;
        List<String> summaries = new ArrayList<>();
        boolean hasFailures = false;

        for (SyncResult r : results) {
            created += r.getCreated();
            updated += r.getUpdated();
            skipped += r.getSkipped();
            failed += r.getFailed();
            purged += r.getPurged();
            if (r.getFailed() > 0) {
                hasFailures = true;
            }
            summaries.add(formatStepSummary(r));
        }

        String status = hasFailures ? STATUS_PARTIAL : STATUS_SUCCESS;

        return SyncMetadata.builder()
                .syncId(syncId)
                .graphId(graphId)
                .syncType(syncType)
                .status(status)
                .startedAt(startedAt)
                .completedAt(completedAt)
                .durationMillis(duration)
                .totalCreated(created)
                .totalUpdated(updated)
                .totalSkipped(skipped)
                .totalFailed(failed)
                .totalPurged(purged)
                .stepSummaries(summaries)
                .results(results)
                .build();
    }

    /**
     * Builds metadata for a run that failed outright (no per-step results).
     */
    public static SyncMetadata failed(String syncId, String graphId, String syncType,
                                      LocalDateTime startedAt, String errorMessage) {
        LocalDateTime completedAt = LocalDateTime.now();
        long duration = Duration.between(startedAt, completedAt).toMillis();

        return SyncMetadata.builder()
                .syncId(syncId)
                .graphId(graphId)
                .syncType(syncType)
                .status(STATUS_FAILED)
                .startedAt(startedAt)
                .completedAt(completedAt)
                .durationMillis(duration)
                .errorMessage(errorMessage)
                .build();
    }

    // Formats one step as "type(+created/~updated/-failed[/purged:n])".
    // Note: the "-" segment carries the FAILED count; skipped is not rendered.
    private static String formatStepSummary(SyncResult r) {
        StringBuilder sb = new StringBuilder();
        sb.append(r.getSyncType())
                .append("(+").append(r.getCreated())
                .append("/~").append(r.getUpdated())
                .append("/-").append(r.getFailed());
        if (r.getPurged() > 0) {
            sb.append("/purged:").append(r.getPurged());
        }
        sb.append(")");
        return sb.toString();
    }
}
|
||||
@@ -0,0 +1,81 @@
|
||||
package com.datamate.knowledgegraph.domain.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Per-step synchronization result statistics.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SyncResult {

    /** Trace identifier of this sync run */
    private String syncId;

    /** The entity/relation type being synchronized */
    private String syncType;

    @Builder.Default
    private int created = 0;

    @Builder.Default
    private int updated = 0;

    @Builder.Default
    private int skipped = 0;

    @Builder.Default
    private int failed = 0;

    /** Number of stale entities removed during full reconciliation */
    @Builder.Default
    private int purged = 0;

    /** Marks a placeholder step (feature not yet implemented; result carries no real data) */
    @Builder.Default
    private boolean placeholder = false;

    @Builder.Default
    private List<String> errors = new ArrayList<>();

    private LocalDateTime startedAt;

    private LocalDateTime completedAt;

    /** Total processed items: created + updated + skipped + failed (purged excluded). */
    public int total() {
        return created + updated + skipped + failed;
    }

    /** Wall-clock duration in milliseconds; 0 when either timestamp is missing. */
    public long durationMillis() {
        if (startedAt == null || completedAt == null) {
            return 0;
        }
        return java.time.Duration.between(startedAt, completedAt).toMillis();
    }

    public void incrementCreated() {
        created++;
    }

    public void incrementUpdated() {
        updated++;
    }

    public void incrementSkipped() {
        skipped++;
    }

    /** Records an error message and counts the item as failed. */
    public void addError(String error) {
        failed++;
        errors.add(error);
    }
}
|
||||
@@ -0,0 +1,193 @@
|
||||
package com.datamate.knowledgegraph.domain.repository;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.EditReview;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.neo4j.driver.Value;
|
||||
import org.neo4j.driver.types.MapAccessor;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* 编辑审核仓储。
|
||||
* <p>
|
||||
* 使用 {@code Neo4jClient} 管理 {@code EditReview} 节点。
|
||||
*/
|
||||
@Repository
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
public class EditReviewRepository {
|
||||
|
||||
private final Neo4jClient neo4jClient;
|
||||
|
||||
    /**
     * Creates or updates an {@code EditReview} node (MERGE on id).
     * <p>
     * A missing id / createdAt is filled in before persisting. Nullable string
     * fields are stored as empty strings so the node always carries the full
     * property set.
     *
     * @param review the review to persist; mutated in place (id, createdAt)
     * @return the same instance, with id and createdAt populated
     */
    public EditReview save(EditReview review) {
        if (review.getId() == null) {
            review.setId(UUID.randomUUID().toString());
        }
        if (review.getCreatedAt() == null) {
            review.setCreatedAt(LocalDateTime.now());
        }

        Map<String, Object> params = new HashMap<>();
        params.put("id", review.getId());
        params.put("graphId", review.getGraphId());
        params.put("operationType", review.getOperationType());
        params.put("entityId", review.getEntityId() != null ? review.getEntityId() : "");
        params.put("relationId", review.getRelationId() != null ? review.getRelationId() : "");
        params.put("payload", review.getPayload() != null ? review.getPayload() : "");
        params.put("status", review.getStatus());
        params.put("submittedBy", review.getSubmittedBy() != null ? review.getSubmittedBy() : "");
        params.put("reviewedBy", review.getReviewedBy() != null ? review.getReviewedBy() : "");
        params.put("reviewComment", review.getReviewComment() != null ? review.getReviewComment() : "");
        params.put("createdAt", review.getCreatedAt());

        // When reviewed_at is null (PENDING state) the SET clause omits it
        // entirely, so a null parameter never clears or misses the property.
        String reviewedAtSet = "";
        if (review.getReviewedAt() != null) {
            reviewedAtSet = ", r.reviewed_at = $reviewedAt";
            params.put("reviewedAt", review.getReviewedAt());
        }

        neo4jClient
                .query(
                        "MERGE (r:EditReview {id: $id}) " +
                        "SET r.graph_id = $graphId, " +
                        "    r.operation_type = $operationType, " +
                        "    r.entity_id = $entityId, " +
                        "    r.relation_id = $relationId, " +
                        "    r.payload = $payload, " +
                        "    r.status = $status, " +
                        "    r.submitted_by = $submittedBy, " +
                        "    r.reviewed_by = $reviewedBy, " +
                        "    r.review_comment = $reviewComment, " +
                        "    r.created_at = $createdAt" +
                        reviewedAtSet + " " +
                        "RETURN r"
                )
                .bindAll(params)
                .run();

        return review;
    }
|
||||
|
||||
public Optional<EditReview> findById(String reviewId, String graphId) {
|
||||
return neo4jClient
|
||||
.query("MATCH (r:EditReview {id: $id, graph_id: $graphId}) RETURN r")
|
||||
.bindAll(Map.of("id", reviewId, "graphId", graphId))
|
||||
.fetchAs(EditReview.class)
|
||||
.mappedBy((typeSystem, record) -> mapRecord(record))
|
||||
.one();
|
||||
}
|
||||
|
||||
public List<EditReview> findPendingByGraphId(String graphId, long skip, int size) {
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (r:EditReview {graph_id: $graphId, status: 'PENDING'}) " +
|
||||
"RETURN r ORDER BY r.created_at DESC SKIP $skip LIMIT $size"
|
||||
)
|
||||
.bindAll(Map.of("graphId", graphId, "skip", skip, "size", size))
|
||||
.fetchAs(EditReview.class)
|
||||
.mappedBy((typeSystem, record) -> mapRecord(record))
|
||||
.all()
|
||||
.stream().toList();
|
||||
}
|
||||
|
||||
public long countPendingByGraphId(String graphId) {
|
||||
return neo4jClient
|
||||
.query("MATCH (r:EditReview {graph_id: $graphId, status: 'PENDING'}) RETURN count(r) AS cnt")
|
||||
.bindAll(Map.of("graphId", graphId))
|
||||
.fetchAs(Long.class)
|
||||
.mappedBy((typeSystem, record) -> record.get("cnt").asLong())
|
||||
.one()
|
||||
.orElse(0L);
|
||||
}
|
||||
|
||||
public List<EditReview> findByGraphId(String graphId, String status, long skip, int size) {
|
||||
String statusFilter = (status != null && !status.isBlank())
|
||||
? "AND r.status = $status "
|
||||
: "";
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("graphId", graphId);
|
||||
params.put("status", status != null ? status : "");
|
||||
params.put("skip", skip);
|
||||
params.put("size", size);
|
||||
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (r:EditReview {graph_id: $graphId}) " +
|
||||
"WHERE true " + statusFilter +
|
||||
"RETURN r ORDER BY r.created_at DESC SKIP $skip LIMIT $size"
|
||||
)
|
||||
.bindAll(params)
|
||||
.fetchAs(EditReview.class)
|
||||
.mappedBy((typeSystem, record) -> mapRecord(record))
|
||||
.all()
|
||||
.stream().toList();
|
||||
}
|
||||
|
||||
public long countByGraphId(String graphId, String status) {
|
||||
String statusFilter = (status != null && !status.isBlank())
|
||||
? "AND r.status = $status "
|
||||
: "";
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("graphId", graphId);
|
||||
params.put("status", status != null ? status : "");
|
||||
|
||||
return neo4jClient
|
||||
.query(
|
||||
"MATCH (r:EditReview {graph_id: $graphId}) " +
|
||||
"WHERE true " + statusFilter +
|
||||
"RETURN count(r) AS cnt"
|
||||
)
|
||||
.bindAll(params)
|
||||
.fetchAs(Long.class)
|
||||
.mappedBy((typeSystem, record) -> record.get("cnt").asLong())
|
||||
.one()
|
||||
.orElse(0L);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 内部映射
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
    /**
     * Maps a raw query record onto an {@link EditReview}.
     * <p>
     * Expects the review node under the column alias {@code "r"}. Fields that
     * are persisted as {@code ""} placeholders when absent (e.g. reviewed_by,
     * review_comment — see the save path) are converted back to {@code null}
     * via {@code getStringOrEmpty}.
     */
    private EditReview mapRecord(MapAccessor record) {
        Value r = record.get("r");

        return EditReview.builder()
                .id(getStringOrNull(r, "id"))
                .graphId(getStringOrNull(r, "graph_id"))
                .operationType(getStringOrNull(r, "operation_type"))
                .entityId(getStringOrEmpty(r, "entity_id"))
                .relationId(getStringOrEmpty(r, "relation_id"))
                .payload(getStringOrNull(r, "payload"))
                .status(getStringOrNull(r, "status"))
                .submittedBy(getStringOrEmpty(r, "submitted_by"))
                .reviewedBy(getStringOrEmpty(r, "reviewed_by"))
                .reviewComment(getStringOrEmpty(r, "review_comment"))
                .createdAt(getLocalDateTimeOrNull(r, "created_at"))
                // reviewed_at is never written for PENDING reviews, so it may be absent
                .reviewedAt(getLocalDateTimeOrNull(r, "reviewed_at"))
                .build();
    }
|
||||
|
||||
private static String getStringOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asString();
|
||||
}
|
||||
|
||||
private static String getStringOrEmpty(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
if (v == null || v.isNull()) return null;
|
||||
String s = v.asString();
|
||||
return s.isEmpty() ? null : s;
|
||||
}
|
||||
|
||||
private static LocalDateTime getLocalDateTimeOrNull(Value value, String key) {
|
||||
Value v = value.get(key);
|
||||
return (v == null || v.isNull()) ? null : v.asLocalDateTime();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,103 @@
|
||||
package com.datamate.knowledgegraph.domain.repository;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import org.springframework.data.neo4j.repository.Neo4jRepository;
|
||||
import org.springframework.data.neo4j.repository.query.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
@Repository
|
||||
public interface GraphEntityRepository extends Neo4jRepository<GraphEntity, String> {
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.id = $entityId RETURN e")
|
||||
Optional<GraphEntity> findByIdAndGraphId(
|
||||
@Param("entityId") String entityId,
|
||||
@Param("graphId") String graphId);
|
||||
|
||||
List<GraphEntity> findByGraphId(String graphId);
|
||||
|
||||
List<GraphEntity> findByGraphIdAndType(String graphId, String type);
|
||||
|
||||
List<GraphEntity> findByGraphIdAndNameContaining(String graphId, String name);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) " +
|
||||
"WHERE e.name = $name AND e.type = $type " +
|
||||
"RETURN e")
|
||||
List<GraphEntity> findByGraphIdAndNameAndType(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("name") String name,
|
||||
@Param("type") String type);
|
||||
|
||||
@Query("MATCH p = (e:Entity {graph_id: $graphId, id: $entityId})-[*1..$depth]-(neighbor:Entity) " +
|
||||
"WHERE e <> neighbor " +
|
||||
" AND ALL(n IN nodes(p) WHERE n.graph_id = $graphId) " +
|
||||
" AND ALL(r IN relationships(p) WHERE r.graph_id = $graphId) " +
|
||||
"RETURN DISTINCT neighbor LIMIT $limit")
|
||||
List<GraphEntity> findNeighbors(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("entityId") String entityId,
|
||||
@Param("depth") int depth,
|
||||
@Param("limit") int limit);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) RETURN count(e)")
|
||||
long countByGraphId(@Param("graphId") String graphId);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) " +
|
||||
"WHERE e.source_id = $sourceId AND e.type = $type " +
|
||||
"RETURN e")
|
||||
Optional<GraphEntity> findByGraphIdAndSourceIdAndType(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("sourceId") String sourceId,
|
||||
@Param("type") String type);
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 分页查询
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) " +
|
||||
"RETURN e ORDER BY e.created_at DESC SKIP $skip LIMIT $limit")
|
||||
List<GraphEntity> findByGraphIdPaged(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("skip") long skip,
|
||||
@Param("limit") int limit);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.type = $type " +
|
||||
"RETURN e ORDER BY e.created_at DESC SKIP $skip LIMIT $limit")
|
||||
List<GraphEntity> findByGraphIdAndTypePaged(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("type") String type,
|
||||
@Param("skip") long skip,
|
||||
@Param("limit") int limit);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.type = $type " +
|
||||
"RETURN count(e)")
|
||||
long countByGraphIdAndType(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("type") String type);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.name CONTAINS $name " +
|
||||
"RETURN e ORDER BY e.created_at DESC SKIP $skip LIMIT $limit")
|
||||
List<GraphEntity> findByGraphIdAndNameContainingPaged(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("name") String name,
|
||||
@Param("skip") long skip,
|
||||
@Param("limit") int limit);
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.name CONTAINS $name " +
|
||||
"RETURN count(e)")
|
||||
long countByGraphIdAndNameContaining(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("name") String name);
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 图查询
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Query("MATCH (e:Entity {graph_id: $graphId}) WHERE e.id IN $entityIds RETURN e")
|
||||
List<GraphEntity> findByGraphIdAndIdIn(
|
||||
@Param("graphId") String graphId,
|
||||
@Param("entityIds") List<String> entityIds);
|
||||
}
|
||||
@@ -0,0 +1,499 @@
|
||||
package com.datamate.knowledgegraph.domain.repository;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.RelationDetail;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.neo4j.driver.Value;
|
||||
import org.neo4j.driver.types.MapAccessor;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.*;
|
||||
|
||||
/**
 * Knowledge-graph relation repository.
 * <p>
 * Because {@code GraphRelation} is a {@code @RelationshipProperties} type, it
 * cannot be managed through a {@code Neo4jRepository}; CRUD is therefore
 * implemented with hand-written Cypher via {@code Neo4jClient}.
 * <p>
 * Neo4j stores a single relationship type, {@code RELATED_TO}; the semantic
 * type is carried in the {@code relation_type} property. Extended properties
 * are serialized to a JSON string held in the {@code properties_json} property.
 */
@Repository
@Slf4j
@RequiredArgsConstructor
public class GraphRelationRepository {

    // Single physical relationship type; semantics live in relation_type.
    private static final String REL_TYPE = "RELATED_TO";
    private static final TypeReference<Map<String, Object>> MAP_TYPE = new TypeReference<>() {};
    private static final ObjectMapper MAPPER = new ObjectMapper();

    /** Shared RETURN clause: relationship plus source/target node summaries. */
    private static final String RETURN_COLUMNS =
        "RETURN r, " +
        "s.id AS sourceEntityId, s.name AS sourceEntityName, s.type AS sourceEntityType, " +
        "t.id AS targetEntityId, t.name AS targetEntityName, t.type AS targetEntityType";

    private final Neo4jClient neo4jClient;

    // -----------------------------------------------------------------------
    // Queries
    // -----------------------------------------------------------------------

    /**
     * Finds one relation by its id within a graph.
     *
     * @return the relation with both endpoint summaries, or empty if absent
     */
    public Optional<RelationDetail> findByIdAndGraphId(String relationId, String graphId) {
        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {id: $relationId, graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        RETURN_COLUMNS
                )
                .bindAll(Map.of("graphId", graphId, "relationId", relationId))
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .one();
    }

    /**
     * Pages through a graph's relations, optionally filtered by semantic type,
     * ordered newest first.
     */
    public List<RelationDetail> findByGraphId(String graphId, String type, long skip, int size) {
        // "WHERE true" lets the optional filter be appended with a plain AND.
        String typeFilter = (type != null && !type.isBlank())
                ? "AND r.relation_type = $type "
                : "";

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        // bound unconditionally so the statement is valid either way
        params.put("type", type != null ? type : "");
        params.put("skip", skip);
        params.put("size", size);

        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        "WHERE true " + typeFilter +
                        RETURN_COLUMNS + " " +
                        "ORDER BY r.created_at DESC " +
                        "SKIP $skip LIMIT $size"
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }

    /**
     * Lists all relations touching an entity (outgoing + incoming).
     * <p>
     * Uses {@code CALL { ... UNION ALL ... }} to anchor the outgoing and
     * incoming matches separately, avoiding a full-graph scan followed by
     * filtering. {@code WITH DISTINCT} de-duplicates self-loop relations.
     */
    public List<RelationDetail> findByEntityId(String graphId, String entityId, String type,
                                               long skip, int size) {
        String typeFilter = (type != null && !type.isBlank())
                ? "WHERE r.relation_type = $type "
                : "";

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("entityId", entityId);
        params.put("type", type != null ? type : "");
        params.put("skip", skip);
        params.put("size", size);

        return neo4jClient
                .query(
                        "CALL { " +
                        "MATCH (s:Entity {graph_id: $graphId, id: $entityId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        typeFilter +
                        "RETURN r, s, t " +
                        "UNION ALL " +
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId, id: $entityId}) " +
                        typeFilter +
                        "RETURN r, s, t " +
                        "} " +
                        "WITH DISTINCT r, s, t " +
                        "ORDER BY r.created_at DESC SKIP $skip LIMIT $size " +
                        RETURN_COLUMNS
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }

    /**
     * Lists an entity's inbound relations (the entity is the target node).
     */
    public List<RelationDetail> findInboundByEntityId(String graphId, String entityId, String type,
                                                      long skip, int size) {
        String typeFilter = (type != null && !type.isBlank())
                ? "AND r.relation_type = $type "
                : "";

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("entityId", entityId);
        params.put("type", type != null ? type : "");
        params.put("skip", skip);
        params.put("size", size);

        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId, id: $entityId}) " +
                        "WHERE true " + typeFilter +
                        RETURN_COLUMNS + " " +
                        "ORDER BY r.created_at DESC " +
                        "SKIP $skip LIMIT $size"
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }

    /**
     * Lists an entity's outbound relations (the entity is the source node).
     */
    public List<RelationDetail> findOutboundByEntityId(String graphId, String entityId, String type,
                                                       long skip, int size) {
        String typeFilter = (type != null && !type.isBlank())
                ? "AND r.relation_type = $type "
                : "";

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("entityId", entityId);
        params.put("type", type != null ? type : "");
        params.put("skip", skip);
        params.put("size", size);

        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId, id: $entityId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        "WHERE true " + typeFilter +
                        RETURN_COLUMNS + " " +
                        "ORDER BY r.created_at DESC " +
                        "SKIP $skip LIMIT $size"
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }

    /**
     * Counts an entity's relations.
     * <p>
     * Each direction anchors the MATCH pattern at the entity to avoid a
     * full-graph scan. Direction "all" uses {@code CALL { ... UNION ... }},
     * whose row de-duplication keeps a self-loop from being counted twice.
     *
     * @param direction "all", "in" or "out" (anything else counts as "all")
     */
    public long countByEntityId(String graphId, String entityId, String type, String direction) {
        String typeFilter = (type != null && !type.isBlank())
                ? "WHERE r.relation_type = $type "
                : "";

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("entityId", entityId);
        params.put("type", type != null ? type : "");

        String cypher;
        switch (direction) {
            case "in":
                cypher = "MATCH (:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(:Entity {graph_id: $graphId, id: $entityId}) " +
                        typeFilter +
                        "RETURN count(r) AS cnt";
                break;
            case "out":
                cypher = "MATCH (:Entity {graph_id: $graphId, id: $entityId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(:Entity {graph_id: $graphId}) " +
                        typeFilter +
                        "RETURN count(r) AS cnt";
                break;
            default:
                // plain UNION (not UNION ALL) de-duplicates self-loops
                cypher = "CALL { " +
                        "MATCH (:Entity {graph_id: $graphId, id: $entityId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(:Entity {graph_id: $graphId}) " +
                        typeFilter +
                        "RETURN r " +
                        "UNION " +
                        "MATCH (:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(:Entity {graph_id: $graphId, id: $entityId}) " +
                        typeFilter +
                        "RETURN r " +
                        "} " +
                        "RETURN count(r) AS cnt";
                break;
        }

        return neo4jClient
                .query(cypher)
                .bindAll(params)
                .fetchAs(Long.class)
                .mappedBy((typeSystem, record) -> record.get("cnt").asLong())
                .one()
                .orElse(0L);
    }

    /** Lists every relation from one specific entity to another. */
    public List<RelationDetail> findBySourceAndTarget(String graphId, String sourceEntityId, String targetEntityId) {
        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId, id: $sourceEntityId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId, id: $targetEntityId}) " +
                        RETURN_COLUMNS
                )
                .bindAll(Map.of(
                        "graphId", graphId,
                        "sourceEntityId", sourceEntityId,
                        "targetEntityId", targetEntityId
                ))
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }

    /** Lists every relation of one semantic type within a graph (unpaged). */
    public List<RelationDetail> findByType(String graphId, String type) {
        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId, relation_type: $type}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        RETURN_COLUMNS
                )
                .bindAll(Map.of("graphId", graphId, "type", type))
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .all()
                .stream().toList();
    }

    /**
     * Counts a graph's relations, optionally filtered by semantic type.
     */
    public long countByGraphId(String graphId, String type) {
        String typeFilter = (type != null && !type.isBlank())
                ? "AND r.relation_type = $type "
                : "";

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("type", type != null ? type : "");

        return neo4jClient
                .query(
                        "MATCH (:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {graph_id: $graphId}]->" +
                        "(:Entity {graph_id: $graphId}) " +
                        "WHERE true " + typeFilter +
                        "RETURN count(r) AS cnt"
                )
                .bindAll(params)
                .fetchAs(Long.class)
                .mappedBy((typeSystem, record) -> record.get("cnt").asLong())
                .one()
                .orElse(0L);
    }

    // -----------------------------------------------------------------------
    // Writes
    // -----------------------------------------------------------------------

    /**
     * Creates a relation between two entities, or updates it if one with the
     * same (graph, type, endpoints) already exists (MERGE upsert).
     * <p>
     * On match, empty-string sentinels ($sourceId = '', $propertiesJson = '{}')
     * leave the existing values untouched.
     *
     * @return the created/updated relation, or empty when either endpoint is missing
     */
    public Optional<RelationDetail> create(String graphId, String sourceEntityId, String targetEntityId,
                                           String relationType, Map<String, Object> properties,
                                           Double weight, String sourceId, Double confidence) {
        String id = UUID.randomUUID().toString();
        LocalDateTime now = LocalDateTime.now();

        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("sourceEntityId", sourceEntityId);
        params.put("targetEntityId", targetEntityId);
        params.put("id", id);
        params.put("relationType", relationType);
        // defaults applied here, not in Cypher, so bound values are never null
        params.put("weight", weight != null ? weight : 1.0);
        params.put("confidence", confidence != null ? confidence : 1.0);
        params.put("sourceId", sourceId != null ? sourceId : "");
        params.put("propertiesJson", serializeProperties(properties));
        params.put("createdAt", now);

        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId, id: $sourceEntityId}) " +
                        "MATCH (t:Entity {graph_id: $graphId, id: $targetEntityId}) " +
                        "MERGE (s)-[r:" + REL_TYPE + " {graph_id: $graphId, relation_type: $relationType}]->(t) " +
                        "ON CREATE SET r.id = $id, r.weight = $weight, r.confidence = $confidence, " +
                        "    r.source_id = $sourceId, r.properties_json = $propertiesJson, r.created_at = $createdAt " +
                        "ON MATCH SET r.weight = CASE WHEN $weight IS NOT NULL THEN $weight ELSE r.weight END, " +
                        "    r.confidence = CASE WHEN $confidence IS NOT NULL THEN $confidence ELSE r.confidence END, " +
                        "    r.source_id = CASE WHEN $sourceId <> '' THEN $sourceId ELSE r.source_id END, " +
                        "    r.properties_json = CASE WHEN $propertiesJson <> '{}' THEN $propertiesJson ELSE r.properties_json END " +
                        RETURN_COLUMNS
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .one();
    }

    /**
     * Partially updates a relation: only non-null arguments are written.
     * When every argument is null the stored relation is returned unchanged.
     */
    public Optional<RelationDetail> update(String relationId, String graphId,
                                           String relationType, Map<String, Object> properties,
                                           Double weight, Double confidence) {
        Map<String, Object> params = new HashMap<>();
        params.put("graphId", graphId);
        params.put("relationId", relationId);

        // Build one SET clause per provided field (multiple SETs are valid Cypher).
        StringBuilder setClauses = new StringBuilder();
        if (relationType != null) {
            setClauses.append("SET r.relation_type = $relationType ");
            params.put("relationType", relationType);
        }
        if (properties != null) {
            setClauses.append("SET r.properties_json = $propertiesJson ");
            params.put("propertiesJson", serializeProperties(properties));
        }
        if (weight != null) {
            setClauses.append("SET r.weight = $weight ");
            params.put("weight", weight);
        }
        if (confidence != null) {
            setClauses.append("SET r.confidence = $confidence ");
            params.put("confidence", confidence);
        }

        if (setClauses.isEmpty()) {
            // nothing to change — just read the current state
            return findByIdAndGraphId(relationId, graphId);
        }

        return neo4jClient
                .query(
                        "MATCH (s:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {id: $relationId, graph_id: $graphId}]->" +
                        "(t:Entity {graph_id: $graphId}) " +
                        setClauses +
                        RETURN_COLUMNS
                )
                .bindAll(params)
                .fetchAs(RelationDetail.class)
                .mappedBy((typeSystem, record) -> mapRecord(record))
                .one();
    }

    /**
     * Deletes the given relation and returns the number actually removed (0 or 1).
     */
    public long deleteByIdAndGraphId(String relationId, String graphId) {
        // When MATCH finds nothing the pipeline is empty and count(*) aggregates to 0;
        // when one row matches, the row survives DELETE and count(*) returns 1.
        return neo4jClient
                .query(
                        "MATCH (:Entity {graph_id: $graphId})" +
                        "-[r:" + REL_TYPE + " {id: $relationId, graph_id: $graphId}]->" +
                        "(:Entity {graph_id: $graphId}) " +
                        "DELETE r " +
                        "RETURN count(*) AS deleted"
                )
                .bindAll(Map.of("graphId", graphId, "relationId", relationId))
                .fetchAs(Long.class)
                .mappedBy((typeSystem, record) -> record.get("deleted").asLong())
                .one()
                .orElse(0L);
    }

    // -----------------------------------------------------------------------
    // Internal mapping
    // -----------------------------------------------------------------------

    /**
     * Maps one result row (relationship "r" plus the endpoint columns from
     * {@link #RETURN_COLUMNS}) onto a {@code RelationDetail}.
     */
    private RelationDetail mapRecord(MapAccessor record) {
        Value r = record.get("r");

        return RelationDetail.builder()
                .id(getStringOrNull(r, "id"))
                // asString(null) yields null for missing/NULL driver values
                .sourceEntityId(record.get("sourceEntityId").asString(null))
                .sourceEntityName(record.get("sourceEntityName").asString(null))
                .sourceEntityType(record.get("sourceEntityType").asString(null))
                .targetEntityId(record.get("targetEntityId").asString(null))
                .targetEntityName(record.get("targetEntityName").asString(null))
                .targetEntityType(record.get("targetEntityType").asString(null))
                .relationType(getStringOrNull(r, "relation_type"))
                .properties(deserializeProperties(getStringOrNull(r, "properties_json")))
                .weight(getDoubleOrNull(r, "weight"))
                .confidence(getDoubleOrNull(r, "confidence"))
                .sourceId(getStringOrNull(r, "source_id"))
                .graphId(getStringOrNull(r, "graph_id"))
                .createdAt(getLocalDateTimeOrNull(r, "created_at"))
                .build();
    }

    // -----------------------------------------------------------------------
    // Properties JSON (de)serialization
    // -----------------------------------------------------------------------

    /**
     * Serializes the extended-properties map to JSON; null/empty maps become "{}"
     * (which the create() upsert treats as "leave existing value alone").
     *
     * @throws IllegalArgumentException when the map cannot be serialized
     */
    private static String serializeProperties(Map<String, Object> properties) {
        if (properties == null || properties.isEmpty()) {
            return "{}";
        }
        try {
            return MAPPER.writeValueAsString(properties);
        } catch (JsonProcessingException e) {
            // A serialization failure must not be swallowed silently; surface the data problem.
            throw new IllegalArgumentException("Failed to serialize relation properties to JSON", e);
        }
    }

    /**
     * Deserializes properties_json back into a map; malformed or blank input
     * degrades to an empty map (with a truncated warning log) rather than failing reads.
     */
    private static Map<String, Object> deserializeProperties(String json) {
        if (json == null || json.isBlank()) {
            return new HashMap<>();
        }
        try {
            return MAPPER.readValue(json, MAP_TYPE);
        } catch (JsonProcessingException e) {
            log.warn("Failed to deserialize properties_json (returning empty map): json='{}', error={}",
                    json.length() > 100 ? json.substring(0, 100) + "..." : json, e.getMessage());
            return new HashMap<>();
        }
    }

    // -----------------------------------------------------------------------
    // Field-access helpers
    // -----------------------------------------------------------------------

    /** Reads a string property; missing or Neo4j-null values yield null. */
    private static String getStringOrNull(Value value, String key) {
        Value v = value.get(key);
        return (v == null || v.isNull()) ? null : v.asString();
    }

    /** Reads a numeric property as Double; missing or Neo4j-null values yield null. */
    private static Double getDoubleOrNull(Value value, String key) {
        Value v = value.get(key);
        return (v == null || v.isNull()) ? null : v.asDouble();
    }

    /** Reads a temporal property as LocalDateTime; missing or Neo4j-null values yield null. */
    private static LocalDateTime getLocalDateTimeOrNull(Value value, String key) {
        Value v = value.get(key);
        return (v == null || v.isNull()) ? null : v.asLocalDateTime();
    }
}
|
||||
@@ -0,0 +1,43 @@
|
||||
package com.datamate.knowledgegraph.domain.repository;
|
||||
|
||||
import com.datamate.knowledgegraph.domain.model.SyncMetadata;
|
||||
import org.springframework.data.neo4j.repository.Neo4jRepository;
|
||||
import org.springframework.data.neo4j.repository.query.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
 * Spring Data Neo4j repository for {@code SyncHistory} nodes (mapped to the
 * {@code SyncMetadata} domain type). All queries are scoped by {@code graph_id}.
 */
@Repository
public interface SyncHistoryRepository extends Neo4jRepository<SyncMetadata, String> {

    /** Most recent sync runs for a graph, newest first, capped at {@code limit}. */
    @Query("MATCH (h:SyncHistory {graph_id: $graphId}) " +
           "RETURN h ORDER BY h.started_at DESC LIMIT $limit")
    List<SyncMetadata> findByGraphId(
            @Param("graphId") String graphId,
            @Param("limit") int limit);

    /** Most recent sync runs with a given status, newest first. */
    @Query("MATCH (h:SyncHistory {graph_id: $graphId, status: $status}) " +
           "RETURN h ORDER BY h.started_at DESC LIMIT $limit")
    List<SyncMetadata> findByGraphIdAndStatus(
            @Param("graphId") String graphId,
            @Param("status") String status,
            @Param("limit") int limit);

    /** Looks up a single sync run by its sync_id within one graph. */
    @Query("MATCH (h:SyncHistory {graph_id: $graphId, sync_id: $syncId}) RETURN h")
    Optional<SyncMetadata> findByGraphIdAndSyncId(
            @Param("graphId") String graphId,
            @Param("syncId") String syncId);

    /**
     * Pages through sync runs whose start time falls in [{@code from}, {@code to}]
     * (both bounds inclusive), newest first.
     */
    @Query("MATCH (h:SyncHistory {graph_id: $graphId}) " +
           "WHERE h.started_at >= $from AND h.started_at <= $to " +
           "RETURN h ORDER BY h.started_at DESC SKIP $skip LIMIT $limit")
    List<SyncMetadata> findByGraphIdAndTimeRange(
            @Param("graphId") String graphId,
            @Param("from") LocalDateTime from,
            @Param("to") LocalDateTime to,
            @Param("skip") long skip,
            @Param("limit") int limit);
}
|
||||
@@ -0,0 +1,149 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.cache;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.cache.Cache;
|
||||
import org.springframework.cache.CacheManager;
|
||||
import org.springframework.data.redis.core.StringRedisTemplate;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
|
||||
/**
 * Graph cache management service.
 * <p>
 * Provides cache-invalidation operations that the service layer calls after
 * write operations (create/update/delete) to keep the cache eventually
 * consistent with the database.
 * <p>
 * When a {@link StringRedisTemplate} is available, invalidation is fine-grained
 * by graphId prefix so other graphs' entries survive; otherwise it degrades to
 * clearing the whole cache region.
 */
@Service
@Slf4j
public class GraphCacheService {

    // Redis key namespace prepended to every cache name (must match the cache
    // manager's key configuration for prefix matching to work).
    private static final String KEY_PREFIX = "datamate:";

    private final CacheManager cacheManager;
    // Optional: injected only when Redis is on the classpath/context.
    private StringRedisTemplate redisTemplate;

    public GraphCacheService(@Qualifier("knowledgeGraphCacheManager") CacheManager cacheManager) {
        this.cacheManager = cacheManager;
    }

    // Optional setter injection: absent Redis leaves redisTemplate null and
    // every prefix eviction falls back to a full region clear.
    @Autowired(required = false)
    public void setRedisTemplate(StringRedisTemplate redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    /**
     * Invalidates every cache entry belonging to one graph.
     * <p>
     * Called after sync or bulk operations. With Redis available only that
     * graphId's entries are evicted, leaving other graphs untouched.
     */
    public void evictGraphCaches(String graphId) {
        log.debug("Evicting all caches for graph_id={}", graphId);
        evictByGraphPrefix(RedisCacheConfig.CACHE_ENTITIES, graphId);
        evictByGraphPrefix(RedisCacheConfig.CACHE_QUERIES, graphId);
        evictByGraphPrefix(RedisCacheConfig.CACHE_SEARCH, graphId);
    }

    /**
     * Invalidates caches related to a single entity.
     * <p>
     * Called after a single-entity create/update/delete. Evicts the entity's
     * own entry and the list entry precisely, and clears the graph's query
     * cache (neighbor relationships may have changed).
     */
    public void evictEntityCaches(String graphId, String entityId) {
        log.debug("Evicting entity caches: graph_id={}, entity_id={}", graphId, entityId);
        // Precise eviction of the specific entity and the list entry
        evictKey(RedisCacheConfig.CACHE_ENTITIES, cacheKey(graphId, entityId));
        evictKey(RedisCacheConfig.CACHE_ENTITIES, cacheKey(graphId, "list"));
        // Prefix eviction of the query cache for this graph
        evictByGraphPrefix(RedisCacheConfig.CACHE_QUERIES, graphId);
    }

    /**
     * Invalidates a graph's search cache.
     * <p>
     * Called after entity name/description changes.
     */
    public void evictSearchCaches(String graphId) {
        log.debug("Evicting search caches for graph_id={}", graphId);
        evictByGraphPrefix(RedisCacheConfig.CACHE_SEARCH, graphId);
    }

    /**
     * Invalidates every search cache entry (used when no graphId context exists).
     */
    public void evictSearchCaches() {
        log.debug("Evicting all search caches");
        evictCache(RedisCacheConfig.CACHE_SEARCH);
    }

    // -----------------------------------------------------------------------
    // Internals
    // -----------------------------------------------------------------------

    /**
     * Evicts cache entries by graphId prefix.
     * <p>
     * Every cache key starts with {@code graphId:} (see {@link #cacheKey}), so a
     * prefix pattern can select one graph's entries. Falls back to clearing the
     * whole cache region when Redis is unavailable or the scan fails.
     * <p>
     * NOTE(review): {@code redisTemplate.keys(pattern)} issues the blocking
     * Redis KEYS command, which is O(N) over the whole keyspace — consider
     * SCAN-based iteration for large production instances. TODO confirm.
     */
    private void evictByGraphPrefix(String cacheName, String graphId) {
        if (redisTemplate != null) {
            try {
                String pattern = KEY_PREFIX + cacheName + "::" + graphId + ":*";
                Set<String> keys = redisTemplate.keys(pattern);
                if (keys != null && !keys.isEmpty()) {
                    redisTemplate.delete(keys);
                    log.debug("Evicted {} keys for graph_id={} in cache={}", keys.size(), graphId, cacheName);
                }
                return;
            } catch (Exception e) {
                log.warn("Failed to evict by graph prefix, falling back to full cache clear: {}", e.getMessage());
            }
        }
        // Degraded path: clear the entire cache region
        evictCache(cacheName);
    }

    /**
     * Evicts a single cache entry precisely.
     */
    private void evictKey(String cacheName, String key) {
        Cache cache = cacheManager.getCache(cacheName);
        if (cache != null) {
            cache.evict(key);
        }
    }

    /**
     * Clears an entire cache region.
     */
    private void evictCache(String cacheName) {
        Cache cache = cacheManager.getCache(cacheName);
        if (cache != null) {
            cache.clear();
        }
    }

    /**
     * Builds a cache key by joining the parts with ':' (null parts become "null"),
     * for use in {@code @Cacheable} key expressions.
     * <p>
     * <b>Convention</b>: graphId MUST be the first part, so that per-graph
     * prefix invalidation ({@link #evictByGraphPrefix}) can match it.
     */
    public static String cacheKey(Object... parts) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < parts.length; i++) {
            if (i > 0) sb.append(':');
            sb.append(Objects.toString(parts[i], "null"));
        }
        return sb.toString();
    }
}
|
||||
@@ -0,0 +1,83 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.cache;
|
||||
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.cache.CacheManager;
|
||||
import org.springframework.cache.annotation.EnableCaching;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.data.redis.cache.RedisCacheConfiguration;
|
||||
import org.springframework.data.redis.cache.RedisCacheManager;
|
||||
import org.springframework.data.redis.connection.RedisConnectionFactory;
|
||||
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
|
||||
import org.springframework.data.redis.serializer.RedisSerializationContext;
|
||||
import org.springframework.data.redis.serializer.StringRedisSerializer;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Redis 缓存配置。
|
||||
* <p>
|
||||
* 当 {@code datamate.knowledge-graph.cache.enabled=true} 时激活,
|
||||
* 为不同缓存区域配置独立的 TTL。
|
||||
*/
|
||||
@Slf4j
|
||||
@Configuration
|
||||
@EnableCaching
|
||||
@ConditionalOnProperty(
|
||||
prefix = "datamate.knowledge-graph.cache",
|
||||
name = "enabled",
|
||||
havingValue = "true",
|
||||
matchIfMissing = true
|
||||
)
|
||||
public class RedisCacheConfig {
|
||||
|
||||
/** 实体缓存:单实体查询、实体列表 */
|
||||
public static final String CACHE_ENTITIES = "kg:entities";
|
||||
|
||||
/** 查询缓存:邻居图、子图、路径查询 */
|
||||
public static final String CACHE_QUERIES = "kg:queries";
|
||||
|
||||
/** 搜索缓存:全文搜索结果 */
|
||||
public static final String CACHE_SEARCH = "kg:search";
|
||||
|
||||
@Primary
|
||||
@Bean("knowledgeGraphCacheManager")
|
||||
public CacheManager knowledgeGraphCacheManager(
|
||||
RedisConnectionFactory connectionFactory,
|
||||
KnowledgeGraphProperties properties
|
||||
) {
|
||||
KnowledgeGraphProperties.Cache cacheProps = properties.getCache();
|
||||
|
||||
// JSON 序列化,确保缓存数据可读且兼容版本变更
|
||||
var jsonSerializer = new GenericJackson2JsonRedisSerializer();
|
||||
var serializationPair = RedisSerializationContext.SerializationPair.fromSerializer(jsonSerializer);
|
||||
|
||||
RedisCacheConfiguration defaultConfig = RedisCacheConfiguration.defaultCacheConfig()
|
||||
.serializeKeysWith(RedisSerializationContext.SerializationPair.fromSerializer(new StringRedisSerializer()))
|
||||
.serializeValuesWith(serializationPair)
|
||||
.disableCachingNullValues()
|
||||
.prefixCacheNameWith("datamate:");
|
||||
|
||||
// 各缓存区域独立 TTL
|
||||
Map<String, RedisCacheConfiguration> cacheConfigs = Map.of(
|
||||
CACHE_ENTITIES, defaultConfig.entryTtl(Duration.ofSeconds(cacheProps.getEntityTtlSeconds())),
|
||||
CACHE_QUERIES, defaultConfig.entryTtl(Duration.ofSeconds(cacheProps.getQueryTtlSeconds())),
|
||||
CACHE_SEARCH, defaultConfig.entryTtl(Duration.ofSeconds(cacheProps.getSearchTtlSeconds()))
|
||||
);
|
||||
|
||||
log.info("Redis cache enabled: entity TTL={}s, query TTL={}s, search TTL={}s",
|
||||
cacheProps.getEntityTtlSeconds(),
|
||||
cacheProps.getQueryTtlSeconds(),
|
||||
cacheProps.getSearchTtlSeconds());
|
||||
|
||||
return RedisCacheManager.builder(connectionFactory)
|
||||
.cacheDefaults(defaultConfig.entryTtl(Duration.ofSeconds(cacheProps.getQueryTtlSeconds())))
|
||||
.withInitialCacheConfigurations(cacheConfigs)
|
||||
.transactionAware()
|
||||
.build();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,503 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.client;
|
||||
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import lombok.Data;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.core.ParameterizedTypeReference;
|
||||
import org.springframework.http.HttpMethod;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.web.client.RestClientException;
|
||||
import org.springframework.web.client.RestTemplate;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
import java.net.URLEncoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
/**
|
||||
* 数据管理服务 REST 客户端。
|
||||
* <p>
|
||||
* 通过 HTTP 调用 data-management-service 的 REST API,
|
||||
* 拉取数据集、文件等元数据用于同步到 Neo4j。
|
||||
*/
|
||||
@Component
|
||||
@Slf4j
|
||||
public class DataManagementClient {
|
||||
|
||||
private static final String UPDATED_FROM_PARAM = "updatedFrom";
|
||||
private static final String UPDATED_TO_PARAM = "updatedTo";
|
||||
private static final DateTimeFormatter DATETIME_QUERY_FORMATTER = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
|
||||
|
||||
private final RestTemplate restTemplate;
|
||||
private final String baseUrl;
|
||||
private final String annotationBaseUrl;
|
||||
private final int pageSize;
|
||||
|
||||
/**
 * Wires the client with the shared kgRestTemplate and resolves the upstream
 * base URLs and page size from the sync configuration.
 */
public DataManagementClient(
        @Qualifier("kgRestTemplate") RestTemplate restTemplate,
        KnowledgeGraphProperties properties) {
    KnowledgeGraphProperties.Sync sync = properties.getSync();
    this.restTemplate = restTemplate;
    this.baseUrl = sync.getDataManagementUrl();
    this.annotationBaseUrl = sync.getAnnotationServiceUrl();
    this.pageSize = sync.getPageSize();
}
|
||||
|
||||
/**
|
||||
* 拉取所有数据集(自动分页)。
|
||||
*/
|
||||
public List<DatasetDTO> listAllDatasets() {
|
||||
return listAllDatasets(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有数据集(自动分页)并按更新时间窗口过滤。
|
||||
* <p>
|
||||
* 时间窗口参数会透传给上游服务;同时在本地再过滤一次,
|
||||
* 以兼容上游暂未支持该查询参数的场景。
|
||||
*/
|
||||
public List<DatasetDTO> listAllDatasets(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<DatasetDTO> datasets = fetchAllPaged(
|
||||
baseUrl + "/data-management/datasets",
|
||||
new ParameterizedTypeReference<PagedResult<DatasetDTO>>() {},
|
||||
"datasets",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(datasets, DatasetDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有工作流(自动分页)。
|
||||
*/
|
||||
public List<WorkflowDTO> listAllWorkflows() {
|
||||
return listAllWorkflows(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有工作流(自动分页)并按更新时间窗口过滤。
|
||||
*/
|
||||
public List<WorkflowDTO> listAllWorkflows(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<WorkflowDTO> workflows = fetchAllPaged(
|
||||
baseUrl + "/data-management/workflows",
|
||||
new ParameterizedTypeReference<PagedResult<WorkflowDTO>>() {},
|
||||
"workflows",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(workflows, WorkflowDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有作业(自动分页)。
|
||||
*/
|
||||
public List<JobDTO> listAllJobs() {
|
||||
return listAllJobs(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有作业(自动分页)并按更新时间窗口过滤。
|
||||
*/
|
||||
public List<JobDTO> listAllJobs(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<JobDTO> jobs = fetchAllPaged(
|
||||
baseUrl + "/data-management/jobs",
|
||||
new ParameterizedTypeReference<PagedResult<JobDTO>>() {},
|
||||
"jobs",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(jobs, JobDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有标注任务(自动分页,从标注服务)。
|
||||
*/
|
||||
public List<LabelTaskDTO> listAllLabelTasks() {
|
||||
return listAllLabelTasks(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有标注任务(自动分页,从标注服务)并按更新时间窗口过滤。
|
||||
*/
|
||||
public List<LabelTaskDTO> listAllLabelTasks(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<LabelTaskDTO> tasks = fetchAllPaged(
|
||||
annotationBaseUrl + "/annotation/label-tasks",
|
||||
new ParameterizedTypeReference<PagedResult<LabelTaskDTO>>() {},
|
||||
"label-tasks",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(tasks, LabelTaskDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有知识集(自动分页)。
|
||||
*/
|
||||
public List<KnowledgeSetDTO> listAllKnowledgeSets() {
|
||||
return listAllKnowledgeSets(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有知识集(自动分页)并按更新时间窗口过滤。
|
||||
*/
|
||||
public List<KnowledgeSetDTO> listAllKnowledgeSets(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
Map<String, String> timeWindowQuery = buildTimeWindowQuery(updatedFrom, updatedTo);
|
||||
List<KnowledgeSetDTO> sets = fetchAllPaged(
|
||||
baseUrl + "/data-management/knowledge-sets",
|
||||
new ParameterizedTypeReference<PagedResult<KnowledgeSetDTO>>() {},
|
||||
"knowledge-sets",
|
||||
timeWindowQuery);
|
||||
return filterByUpdatedAt(sets, KnowledgeSetDTO::getUpdatedAt, updatedFrom, updatedTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有数据集(自动分页)。
|
||||
*/
|
||||
public List<DatasetDTO> listAllDatasetsLegacy() {
|
||||
return fetchAllPaged(
|
||||
baseUrl + "/data-management/datasets",
|
||||
new ParameterizedTypeReference<PagedResult<DatasetDTO>>() {},
|
||||
"datasets");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有工作流(自动分页)。
|
||||
*/
|
||||
public List<WorkflowDTO> listAllWorkflowsLegacy() {
|
||||
return fetchAllPaged(
|
||||
baseUrl + "/data-management/workflows",
|
||||
new ParameterizedTypeReference<PagedResult<WorkflowDTO>>() {},
|
||||
"workflows");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有作业(自动分页)。
|
||||
*/
|
||||
public List<JobDTO> listAllJobsLegacy() {
|
||||
return fetchAllPaged(
|
||||
baseUrl + "/data-management/jobs",
|
||||
new ParameterizedTypeReference<PagedResult<JobDTO>>() {},
|
||||
"jobs");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有标注任务(自动分页,从标注服务)。
|
||||
*/
|
||||
public List<LabelTaskDTO> listAllLabelTasksLegacy() {
|
||||
return fetchAllPaged(
|
||||
annotationBaseUrl + "/annotation/label-tasks",
|
||||
new ParameterizedTypeReference<PagedResult<LabelTaskDTO>>() {},
|
||||
"label-tasks");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有知识集(自动分页)。
|
||||
*/
|
||||
public List<KnowledgeSetDTO> listAllKnowledgeSetsLegacy() {
|
||||
return fetchAllPaged(
|
||||
baseUrl + "/data-management/knowledge-sets",
|
||||
new ParameterizedTypeReference<PagedResult<KnowledgeSetDTO>>() {},
|
||||
"knowledge-sets");
|
||||
}
|
||||
|
||||
/**
|
||||
* 拉取所有用户的组织映射。
|
||||
*/
|
||||
public Map<String, String> fetchUserOrganizationMap() {
|
||||
String url = baseUrl + "/auth/users/organizations";
|
||||
log.debug("Fetching user-organization mappings from: {}", url);
|
||||
try {
|
||||
ResponseEntity<List<UserOrgDTO>> response = restTemplate.exchange(
|
||||
url, HttpMethod.GET, null,
|
||||
new ParameterizedTypeReference<List<UserOrgDTO>>() {});
|
||||
|
||||
List<UserOrgDTO> body = response.getBody();
|
||||
if (body == null || body.isEmpty()) {
|
||||
log.warn("No user-organization mappings returned from auth service");
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
Map<String, String> result = new LinkedHashMap<>();
|
||||
for (UserOrgDTO dto : body) {
|
||||
if (dto.getUsername() != null && !dto.getUsername().isBlank()) {
|
||||
result.put(dto.getUsername(), dto.getOrganization());
|
||||
}
|
||||
}
|
||||
log.info("Fetched {} user-organization mappings", result.size());
|
||||
return result;
|
||||
} catch (RestClientException e) {
|
||||
log.error("Failed to fetch user-organization mappings from: {}", url, e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 通用自动分页拉取方法。
|
||||
*/
|
||||
private <T> List<T> fetchAllPaged(String baseEndpoint,
|
||||
ParameterizedTypeReference<PagedResult<T>> typeRef,
|
||||
String resourceName) {
|
||||
return fetchAllPaged(baseEndpoint, typeRef, resourceName, Collections.emptyMap());
|
||||
}
|
||||
|
||||
/**
 * Generic auto-paging fetch (with extra query parameters).
 * <p>
 * Starts at page 0 and keeps requesting pages until the server returns an empty
 * page or the reported {@code totalPages} is exhausted. Transport errors are
 * logged and rethrown, aborting the whole fetch.
 */
private <T> List<T> fetchAllPaged(String baseEndpoint,
                                  ParameterizedTypeReference<PagedResult<T>> typeRef,
                                  String resourceName,
                                  Map<String, String> extraQueryParams) {
    List<T> allItems = new ArrayList<>();
    int page = 0;

    while (true) {
        String url = buildPagedUrl(baseEndpoint, page, extraQueryParams);
        log.debug("Fetching {}: page={}, size={}", resourceName, page, pageSize);

        try {
            ResponseEntity<PagedResult<T>> response = restTemplate.exchange(
                    url, HttpMethod.GET, null, typeRef);

            PagedResult<T> body = response.getBody();
            // A null/empty page ends pagination even if totalPages claims more.
            if (body == null || body.getContent() == null || body.getContent().isEmpty()) {
                break;
            }

            allItems.addAll(body.getContent());
            log.debug("Fetched {} {} (page {}), total so far: {}",
                    body.getContent().size(), resourceName, page, allItems.size());

            // Stop once the last reported page has been consumed; also terminates
            // when totalPages is 0/unset (page >= -1 is always true).
            if (page >= body.getTotalPages() - 1) {
                break;
            }
            page++;
        } catch (RestClientException e) {
            // Fail fast: a partial snapshot must not be mistaken for a full one.
            log.error("Failed to fetch {} : page={}, url={}", resourceName, page, url, e);
            throw e;
        }
    }

    log.info("Fetched {} {} in total", allItems.size(), resourceName);
    return allItems;
}
|
||||
|
||||
private String buildPagedUrl(String baseEndpoint, int page, Map<String, String> extraQueryParams) {
|
||||
StringBuilder builder = new StringBuilder(baseEndpoint)
|
||||
.append("?page=").append(page)
|
||||
.append("&size=").append(pageSize);
|
||||
|
||||
if (extraQueryParams != null && !extraQueryParams.isEmpty()) {
|
||||
extraQueryParams.forEach((key, value) -> {
|
||||
if (key == null || key.isBlank() || value == null || value.isBlank()) {
|
||||
return;
|
||||
}
|
||||
builder.append("&")
|
||||
.append(URLEncoder.encode(key, StandardCharsets.UTF_8))
|
||||
.append("=")
|
||||
.append(URLEncoder.encode(value, StandardCharsets.UTF_8));
|
||||
});
|
||||
}
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
private static Map<String, String> buildTimeWindowQuery(LocalDateTime updatedFrom, LocalDateTime updatedTo) {
|
||||
if (updatedFrom != null && updatedTo != null && updatedFrom.isAfter(updatedTo)) {
|
||||
throw new IllegalArgumentException("updatedFrom must be less than or equal to updatedTo");
|
||||
}
|
||||
|
||||
Map<String, String> query = new LinkedHashMap<>();
|
||||
if (updatedFrom != null) {
|
||||
query.put(UPDATED_FROM_PARAM, DATETIME_QUERY_FORMATTER.format(updatedFrom));
|
||||
}
|
||||
if (updatedTo != null) {
|
||||
query.put(UPDATED_TO_PARAM, DATETIME_QUERY_FORMATTER.format(updatedTo));
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
||||
private static <T> List<T> filterByUpdatedAt(
|
||||
List<T> items,
|
||||
Function<T, LocalDateTime> updatedAtGetter,
|
||||
LocalDateTime updatedFrom,
|
||||
LocalDateTime updatedTo) {
|
||||
if ((updatedFrom == null && updatedTo == null) || items == null || items.isEmpty()) {
|
||||
return items;
|
||||
}
|
||||
|
||||
return items.stream()
|
||||
.filter(item -> {
|
||||
if (item == null) {
|
||||
return false;
|
||||
}
|
||||
LocalDateTime updatedAt = updatedAtGetter.apply(item);
|
||||
if (updatedAt == null) {
|
||||
return false;
|
||||
}
|
||||
if (updatedFrom != null && updatedAt.isBefore(updatedFrom)) {
|
||||
return false;
|
||||
}
|
||||
return updatedTo == null || !updatedAt.isAfter(updatedTo);
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
// Response DTOs (only the fields required for sync)
// -----------------------------------------------------------------------

/**
 * Generic page envelope returned by the upstream paged endpoints.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class PagedResult<T> {
    /** Items of the current page; a null/empty list terminates pagination in fetchAllPaged. */
    private List<T> content;
    /** Page index as reported by the server. */
    private long page;
    /** Total item count across all pages. */
    private long totalElements;
    /** Total page count; drives the pagination stop condition. */
    private long totalPages;
}

/**
 * Aligned with DatasetResponse of data-management-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class DatasetDTO {
    private String id;
    private String name;
    private String description;
    private String parentDatasetId;
    private String datasetType;
    private String status;
    private Long totalSize;
    private Integer fileCount;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    private List<TagDTO> tags;
}

/**
 * Aligned with TagResponse of data-management-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class TagDTO {
    private String id;
    private String name;
    private String color;
    private String description;
}

/**
 * Aligned with the Workflow of data-management-service / data-cleaning-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class WorkflowDTO {
    private String id;
    private String name;
    private String description;
    private String workflowType;
    private String status;
    private String version;
    private Integer operatorCount;
    private String schedule;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    /** IDs of the input datasets used by this workflow */
    private List<String> inputDatasetIds;
}

/**
 * Aligned with Job / CleaningTask / DataSynthInstance of data-management-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class JobDTO {
    private String id;
    private String name;
    private String description;
    private String jobType;
    private String status;
    private String startedAt;
    private String completedAt;
    private Long durationSeconds;
    private Long inputCount;
    private Long outputCount;
    private String errorMessage;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    /** Input dataset ID */
    private String inputDatasetId;
    /** Output dataset ID */
    private String outputDatasetId;
    /** Owning workflow ID (TRIGGERS relationship) */
    private String workflowId;
    /** ID of the job this one depends on (DEPENDS_ON relationship) */
    private String dependsOnJobId;
}

/**
 * Aligned with LabelingProject / AutoAnnotationTask of data-annotation-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class LabelTaskDTO {
    private String id;
    private String name;
    private String description;
    private String taskMode;
    private String dataType;
    private String labelingType;
    private String status;
    private Double progress;
    private String templateName;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    /** Dataset ID used for labeling (USES_DATASET relationship) */
    private String datasetId;
}

/**
 * Aligned with KnowledgeSet of data-management-service.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class KnowledgeSetDTO {
    private String id;
    private String name;
    private String description;
    private String status;
    private String domain;
    private String businessLine;
    private String sensitivity;
    private Integer itemCount;
    private String validFrom;
    private String validTo;
    private String createdBy;
    private String updatedBy;
    private LocalDateTime createdAt;
    private LocalDateTime updatedAt;
    /** Source dataset IDs (SOURCED_FROM relationship) */
    private List<String> sourceDatasetIds;
}

/**
 * User-to-organization mapping DTO (aligned with AuthController.listUserOrganizations).
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class UserOrgDTO {
    private String username;
    private String organization;
}
|
||||
}
|
||||
@@ -0,0 +1,36 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.exception;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.ErrorCode;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Getter;
|
||||
|
||||
/**
 * Error codes of the knowledge-graph module.
 * <p>
 * Codes follow the pattern {@code knowledge_graph.NNNN}; messages are the
 * user-facing texts (kept verbatim — they are runtime strings).
 */
@Getter
@AllArgsConstructor
public enum KnowledgeGraphErrorCode implements ErrorCode {

    ENTITY_NOT_FOUND("knowledge_graph.0001", "实体不存在"),
    RELATION_NOT_FOUND("knowledge_graph.0002", "关系不存在"),
    GRAPH_NOT_FOUND("knowledge_graph.0003", "图谱不存在"),
    DUPLICATE_ENTITY("knowledge_graph.0004", "实体已存在"),
    INVALID_RELATION("knowledge_graph.0005", "无效的关系定义"),
    IMPORT_FAILED("knowledge_graph.0006", "图谱导入失败"),
    QUERY_DEPTH_EXCEEDED("knowledge_graph.0007", "查询深度超出限制"),
    MAX_NODES_EXCEEDED("knowledge_graph.0008", "查询结果节点数超出限制"),
    SYNC_FAILED("knowledge_graph.0009", "数据同步失败"),
    EMPTY_SNAPSHOT_PURGE_BLOCKED("knowledge_graph.0010", "空快照保护:上游返回空列表,已阻止 purge 操作"),
    SCHEMA_INIT_FAILED("knowledge_graph.0011", "图谱 Schema 初始化失败"),
    INSECURE_DEFAULT_CREDENTIALS("knowledge_graph.0012", "检测到默认凭据,生产环境禁止使用默认密码"),
    UNAUTHORIZED_INTERNAL_CALL("knowledge_graph.0013", "内部调用未授权:X-Internal-Token 校验失败"),
    QUERY_TIMEOUT("knowledge_graph.0014", "图查询超时,请缩小搜索范围或减少深度"),
    SCHEMA_MIGRATION_FAILED("knowledge_graph.0015", "Schema 迁移执行失败"),
    SCHEMA_CHECKSUM_MISMATCH("knowledge_graph.0016", "Schema 迁移 checksum 不匹配:已应用的迁移被修改"),
    SCHEMA_MIGRATION_LOCKED("knowledge_graph.0017", "Schema 迁移锁被占用,其他实例正在执行迁移"),
    REVIEW_NOT_FOUND("knowledge_graph.0018", "审核记录不存在"),
    REVIEW_ALREADY_PROCESSED("knowledge_graph.0019", "审核记录已处理");

    /** Machine-readable code, pattern {@code knowledge_graph.NNNN}. */
    private final String code;
    /** Human-readable message shown to callers. */
    private final String message;
}
|
||||
@@ -0,0 +1,63 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j;
|
||||
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.migration.SchemaMigrationService;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.boot.ApplicationArguments;
|
||||
import org.springframework.boot.ApplicationRunner;
|
||||
import org.springframework.core.annotation.Order;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
|
||||
* 图谱 Schema 初始化器。
|
||||
* <p>
|
||||
* 应用启动时通过 {@link SchemaMigrationService} 执行版本化 Schema 迁移。
|
||||
* <p>
|
||||
* <b>安全自检</b>:在非开发环境中,检测到默认 Neo4j 密码时拒绝启动。
|
||||
*/
|
||||
@Component
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
@Order(1)
|
||||
public class GraphInitializer implements ApplicationRunner {
|
||||
|
||||
/** 已知的弱默认密码,启动时拒绝。 */
|
||||
private static final Set<String> BLOCKED_DEFAULT_PASSWORDS = Set.of(
|
||||
"datamate123", "neo4j", "password", "123456", "admin"
|
||||
);
|
||||
|
||||
private final KnowledgeGraphProperties properties;
|
||||
private final SchemaMigrationService schemaMigrationService;
|
||||
|
||||
@Value("${spring.neo4j.authentication.password:}")
|
||||
private String neo4jPassword;
|
||||
|
||||
@Value("${spring.profiles.active:default}")
|
||||
private String activeProfile;
|
||||
|
||||
@Override
|
||||
public void run(ApplicationArguments args) {
|
||||
// ── 安全自检:默认凭据检测(已禁用) ──
|
||||
// validateCredentials();
|
||||
|
||||
if (!properties.getSync().isAutoInitSchema()) {
|
||||
log.info("Schema auto-init is disabled, skipping");
|
||||
return;
|
||||
}
|
||||
|
||||
schemaMigrationService.migrate(UUID.randomUUID().toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* 检测是否使用了默认凭据。
|
||||
* <p>
|
||||
* <b>注意:密码安全检查已禁用。</b>
|
||||
*/
|
||||
private void validateCredentials() {
|
||||
// 密码安全检查已禁用,开发环境跳过
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,117 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j;
|
||||
|
||||
import jakarta.validation.constraints.Min;
|
||||
import lombok.Data;
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
|
||||
/**
 * Configuration properties of the knowledge-graph module,
 * bound from the {@code datamate.knowledge-graph} prefix.
 */
@Data
@Component
@Validated
@ConfigurationProperties(prefix = "datamate.knowledge-graph")
public class KnowledgeGraphProperties {

    /** Default hop limit for graph queries. */
    private int maxDepth = 3;

    /** Maximum number of nodes a subgraph query may return. */
    private int maxNodesPerQuery = 500;

    /** Timeout (seconds) for complex graph queries, guarding against runaway path enumeration. */
    @Min(value = 1, message = "queryTimeoutSeconds 必须 >= 1")
    private int queryTimeoutSeconds = 10;

    /** Batch size for bulk imports (must be >= 1, otherwise the modulo arithmetic throws). */
    @Min(value = 1, message = "importBatchSize 必须 >= 1")
    private int importBatchSize = 100;

    /** Synchronization settings. */
    private Sync sync = new Sync();

    /** Security settings. */
    private Security security = new Security();

    /** Schema-migration settings. */
    private Migration migration = new Migration();

    /** Cache settings. */
    private Cache cache = new Cache();

    @Data
    public static class Security {

        /** Internal service-call token, checked against the X-Internal-Token header of sync endpoints. */
        private String internalToken;

        /**
         * Whether to skip internal-token validation (default false, i.e. fail-closed).
         * <p>
         * Only dev/test environments may set this to true explicitly.
         * Production must keep it false and configure {@code internal-token}.
         */
        private boolean skipTokenCheck = false;
    }

    @Data
    public static class Sync {

        /** Base URL of the data-management service. */
        private String dataManagementUrl = "http://localhost:8080/api";

        /** Base URL of the annotation service. */
        private String annotationServiceUrl = "http://localhost:8080/api";

        /** Page size used when pulling data during sync. */
        private int pageSize = 200;

        /** HTTP connect timeout (milliseconds). */
        private int connectTimeout = 5000;

        /** HTTP read timeout (milliseconds). */
        private int readTimeout = 30000;

        /** Maximum number of retries on failure. */
        private int maxRetries = 3;

        /** Delay between retries (milliseconds). */
        private long retryInterval = 1000;

        /** Whether to auto-initialize the schema at startup. */
        private boolean autoInitSchema = true;

        /**
         * Whether an empty snapshot may trigger a purge (default false).
         * <p>
         * When upstream returns an empty list and this flag is false, the purge is
         * skipped to avoid accidentally deleting every synced entity. Enable only
         * after confirming the data source really is empty.
         */
        private boolean allowPurgeOnEmptySnapshot = false;
    }

    @Data
    public static class Migration {

        /** Whether versioned schema migration is enabled. */
        private boolean enabled = true;

        /** Whether to verify checksums of applied migrations (guards against tampering). */
        private boolean validateChecksums = true;
    }

    @Data
    public static class Cache {

        /** Whether caching is enabled. */
        private boolean enabled = true;

        /** Entity cache TTL (seconds). */
        private long entityTtlSeconds = 3600;

        /** Query-result cache TTL (seconds). */
        private long queryTtlSeconds = 300;

        /** Full-text-search result cache TTL (seconds). */
        private long searchTtlSeconds = 180;
    }
}
|
||||
@@ -0,0 +1,20 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * A versioned schema migration.
 * <p>
 * Each implementation represents one schema change; version numbers increase
 * monotonically and determine execution order.
 */
public interface SchemaMigration {

    /** Monotonically increasing version number (1, 2, 3, ...). */
    int getVersion();

    /** Human-readable description. */
    String getDescription();

    /** Cypher DDL statements to execute for this migration. */
    List<String> getStatements();
}
|
||||
@@ -0,0 +1,42 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
 * Migration record, mapped to the {@code _SchemaMigration} node.
 * <p>
 * Plain POJO; deliberately not annotated with SDN {@code @Node}.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SchemaMigrationRecord {

    /** Migration version number */
    private int version;

    /** Migration description */
    private String description;

    /** SHA-256 checksum of the migration statements */
    private String checksum;

    /** When the migration was applied (ISO-8601) */
    private String appliedAt;

    /** Execution time of the migration (milliseconds) */
    private long executionTimeMs;

    /** Whether the migration succeeded */
    private boolean success;

    /** Number of statements in the migration */
    private int statementsCount;

    /** Error message on failure */
    private String errorMessage;
}
|
||||
@@ -0,0 +1,384 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.time.Instant;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Schema 迁移编排器。
|
||||
* <p>
|
||||
* 参考 Flyway 设计思路,为 Neo4j 图数据库提供版本化迁移机制:
|
||||
* <ul>
|
||||
* <li>在数据库中记录已应用的迁移版本({@code _SchemaMigration} 节点)</li>
|
||||
* <li>自动检测并执行新增迁移</li>
|
||||
* <li>通过 checksum 校验防止已应用迁移被篡改</li>
|
||||
* <li>通过分布式锁({@code _SchemaLock} 节点)防止多实例并发迁移</li>
|
||||
* </ul>
|
||||
*/
|
||||
@Component
|
||||
@Slf4j
|
||||
public class SchemaMigrationService {
|
||||
|
||||
/** 分布式锁过期时间(毫秒),5 分钟 */
|
||||
private static final long LOCK_TIMEOUT_MS = 5 * 60 * 1000;
|
||||
|
||||
/** 仅识别「已存在」类错误消息的关键词,其余错误不应吞掉。 */
|
||||
private static final Set<String> ALREADY_EXISTS_KEYWORDS = Set.of(
|
||||
"already exists", "already exist", "EquivalentSchemaRuleAlreadyExists"
|
||||
);
|
||||
|
||||
private final Neo4jClient neo4jClient;
|
||||
private final KnowledgeGraphProperties properties;
|
||||
private final List<SchemaMigration> migrations;
|
||||
|
||||
public SchemaMigrationService(Neo4jClient neo4jClient,
|
||||
KnowledgeGraphProperties properties,
|
||||
List<SchemaMigration> migrations) {
|
||||
this.neo4jClient = neo4jClient;
|
||||
this.properties = properties;
|
||||
this.migrations = migrations.stream()
|
||||
.sorted(Comparator.comparingInt(SchemaMigration::getVersion))
|
||||
.toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* 执行 Schema 迁移主流程。
|
||||
*
|
||||
* @param instanceId 当前实例标识,用于分布式锁
|
||||
*/
|
||||
public void migrate(String instanceId) {
|
||||
if (!properties.getMigration().isEnabled()) {
|
||||
log.info("Schema migration is disabled, skipping");
|
||||
return;
|
||||
}
|
||||
|
||||
log.info("Starting schema migration, instanceId={}", instanceId);
|
||||
|
||||
// 1. Bootstrap — 创建迁移系统自身需要的约束
|
||||
bootstrapMigrationSchema();
|
||||
|
||||
// 2. 获取分布式锁
|
||||
acquireLock(instanceId);
|
||||
|
||||
try {
|
||||
// 3. 加载已应用迁移
|
||||
List<SchemaMigrationRecord> applied = loadAppliedMigrations();
|
||||
|
||||
// 4. 校验 checksum
|
||||
if (properties.getMigration().isValidateChecksums()) {
|
||||
validateChecksums(applied, migrations);
|
||||
}
|
||||
|
||||
// 5. 过滤待执行迁移
|
||||
Set<Integer> appliedVersions = applied.stream()
|
||||
.map(SchemaMigrationRecord::getVersion)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
List<SchemaMigration> pending = migrations.stream()
|
||||
.filter(m -> !appliedVersions.contains(m.getVersion()))
|
||||
.toList();
|
||||
|
||||
if (pending.isEmpty()) {
|
||||
log.info("Schema is up to date, no pending migrations");
|
||||
return;
|
||||
}
|
||||
|
||||
// 6. 逐个执行
|
||||
executePendingMigrations(pending);
|
||||
|
||||
log.info("Schema migration completed successfully, applied {} migration(s)", pending.size());
|
||||
|
||||
} finally {
|
||||
// 7. 释放锁
|
||||
releaseLock(instanceId);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建迁移系统自身需要的约束(解决鸡生蛋问题)。
|
||||
*/
|
||||
void bootstrapMigrationSchema() {
|
||||
log.debug("Bootstrapping migration schema constraints");
|
||||
neo4jClient.query(
|
||||
"CREATE CONSTRAINT schema_migration_version_unique IF NOT EXISTS " +
|
||||
"FOR (n:_SchemaMigration) REQUIRE n.version IS UNIQUE"
|
||||
).run();
|
||||
neo4jClient.query(
|
||||
"CREATE CONSTRAINT schema_lock_name_unique IF NOT EXISTS " +
|
||||
"FOR (n:_SchemaLock) REQUIRE n.name IS UNIQUE"
|
||||
).run();
|
||||
|
||||
// 修复历史遗留节点:为缺失属性补充默认值,避免后续查询产生属性缺失警告
|
||||
neo4jClient.query(
|
||||
"MATCH (m:_SchemaMigration) WHERE m.description IS NULL OR m.checksum IS NULL " +
|
||||
"SET m.description = COALESCE(m.description, ''), " +
|
||||
" m.checksum = COALESCE(m.checksum, ''), " +
|
||||
" m.applied_at = COALESCE(m.applied_at, ''), " +
|
||||
" m.execution_time_ms = COALESCE(m.execution_time_ms, 0), " +
|
||||
" m.statements_count = COALESCE(m.statements_count, 0), " +
|
||||
" m.error_message = COALESCE(m.error_message, '')"
|
||||
).run();
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取分布式锁。
|
||||
* <p>
|
||||
* MERGE {@code _SchemaLock} 节点,如果锁已被其他实例持有且未过期,则抛出异常。
|
||||
* 如果锁已过期(超过 5 分钟),自动接管。
|
||||
* <p>
|
||||
* 时间戳完全使用数据库端 {@code datetime().epochMillis},避免多实例时钟偏差导致锁被误抢占。
|
||||
*/
|
||||
void acquireLock(String instanceId) {
|
||||
log.debug("Acquiring schema migration lock, instanceId={}", instanceId);
|
||||
|
||||
// 使用数据库时间(datetime().epochMillis)避免多实例时钟偏差导致锁被误抢占
|
||||
Optional<Map<String, Object>> result = neo4jClient.query(
|
||||
"MERGE (lock:_SchemaLock {name: 'schema_migration'}) " +
|
||||
"ON CREATE SET lock.locked_by = $instanceId, lock.locked_at = datetime().epochMillis " +
|
||||
"WITH lock, " +
|
||||
" CASE WHEN lock.locked_by = $instanceId THEN true " +
|
||||
" WHEN lock.locked_at < (datetime().epochMillis - $timeoutMs) THEN true " +
|
||||
" ELSE false END AS canAcquire " +
|
||||
"SET lock.locked_by = CASE WHEN canAcquire THEN $instanceId ELSE lock.locked_by END, " +
|
||||
" lock.locked_at = CASE WHEN canAcquire THEN datetime().epochMillis ELSE lock.locked_at END " +
|
||||
"RETURN lock.locked_by AS lockedBy, canAcquire"
|
||||
).bindAll(Map.of("instanceId", instanceId, "timeoutMs", LOCK_TIMEOUT_MS))
|
||||
.fetch().first();
|
||||
|
||||
if (result.isEmpty()) {
|
||||
throw new IllegalStateException("Failed to acquire schema migration lock: unexpected empty result");
|
||||
}
|
||||
|
||||
Boolean canAcquire = (Boolean) result.get().get("canAcquire");
|
||||
if (!Boolean.TRUE.equals(canAcquire)) {
|
||||
String lockedBy = (String) result.get().get("lockedBy");
|
||||
throw BusinessException.of(
|
||||
KnowledgeGraphErrorCode.SCHEMA_MIGRATION_LOCKED,
|
||||
"Schema migration lock is held by instance: " + lockedBy
|
||||
);
|
||||
}
|
||||
|
||||
log.info("Schema migration lock acquired, instanceId={}", instanceId);
|
||||
}
|
||||
|
||||
/**
|
||||
* 释放分布式锁。
|
||||
*/
|
||||
void releaseLock(String instanceId) {
|
||||
try {
|
||||
neo4jClient.query(
|
||||
"MATCH (lock:_SchemaLock {name: 'schema_migration', locked_by: $instanceId}) " +
|
||||
"DELETE lock"
|
||||
).bindAll(Map.of("instanceId", instanceId)).run();
|
||||
log.debug("Schema migration lock released, instanceId={}", instanceId);
|
||||
} catch (Exception e) {
|
||||
log.warn("Failed to release schema migration lock: {}", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 加载已应用的迁移记录。
|
||||
*/
|
||||
List<SchemaMigrationRecord> loadAppliedMigrations() {
|
||||
return neo4jClient.query(
|
||||
"MATCH (m:_SchemaMigration {success: true}) " +
|
||||
"RETURN m.version AS version, " +
|
||||
" COALESCE(m.description, '') AS description, " +
|
||||
" COALESCE(m.checksum, '') AS checksum, " +
|
||||
" COALESCE(m.applied_at, '') AS appliedAt, " +
|
||||
" COALESCE(m.execution_time_ms, 0) AS executionTimeMs, " +
|
||||
" m.success AS success, " +
|
||||
" COALESCE(m.statements_count, 0) AS statementsCount, " +
|
||||
" COALESCE(m.error_message, '') AS errorMessage " +
|
||||
"ORDER BY m.version"
|
||||
).fetch().all().stream()
|
||||
.map(row -> SchemaMigrationRecord.builder()
|
||||
.version(((Number) row.get("version")).intValue())
|
||||
.description((String) row.get("description"))
|
||||
.checksum((String) row.get("checksum"))
|
||||
.appliedAt((String) row.get("appliedAt"))
|
||||
.executionTimeMs(((Number) row.get("executionTimeMs")).longValue())
|
||||
.success(Boolean.TRUE.equals(row.get("success")))
|
||||
.statementsCount(((Number) row.get("statementsCount")).intValue())
|
||||
.errorMessage((String) row.get("errorMessage"))
|
||||
.build())
|
||||
.toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* 校验已应用迁移的 checksum。
|
||||
*/
|
||||
void validateChecksums(List<SchemaMigrationRecord> applied, List<SchemaMigration> registered) {
|
||||
Map<Integer, SchemaMigration> registeredByVersion = registered.stream()
|
||||
.collect(Collectors.toMap(SchemaMigration::getVersion, m -> m));
|
||||
|
||||
for (SchemaMigrationRecord record : applied) {
|
||||
SchemaMigration migration = registeredByVersion.get(record.getVersion());
|
||||
if (migration == null) {
|
||||
continue; // 已应用但代码中不再有该迁移(可能是老版本被删除)
|
||||
}
|
||||
|
||||
// 跳过 checksum 为空的历史遗留记录(属性缺失修复后的节点)
|
||||
if (record.getChecksum() == null || record.getChecksum().isEmpty()) {
|
||||
log.warn("Migration V{} ({}) has no recorded checksum, skipping validation",
|
||||
record.getVersion(), record.getDescription());
|
||||
continue;
|
||||
}
|
||||
|
||||
String currentChecksum = computeChecksum(migration.getStatements());
|
||||
if (!currentChecksum.equals(record.getChecksum())) {
|
||||
throw BusinessException.of(
|
||||
KnowledgeGraphErrorCode.SCHEMA_CHECKSUM_MISMATCH,
|
||||
String.format("Migration V%d (%s): recorded checksum=%s, current checksum=%s",
|
||||
record.getVersion(), record.getDescription(),
|
||||
record.getChecksum(), currentChecksum)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 逐个执行待迁移。
|
||||
*/
|
||||
void executePendingMigrations(List<SchemaMigration> pending) {
|
||||
for (SchemaMigration migration : pending) {
|
||||
log.info("Executing migration V{}: {}", migration.getVersion(), migration.getDescription());
|
||||
|
||||
long startTime = System.currentTimeMillis();
|
||||
String errorMessage = null;
|
||||
boolean success = true;
|
||||
|
||||
try {
|
||||
for (String statement : migration.getStatements()) {
|
||||
try {
|
||||
neo4jClient.query(statement).run();
|
||||
log.debug(" Statement executed: {}",
|
||||
statement.length() <= 100 ? statement : statement.substring(0, 97) + "...");
|
||||
} catch (Exception e) {
|
||||
if (isAlreadyExistsError(e)) {
|
||||
log.debug(" Schema element already exists (safe to skip): {}",
|
||||
statement.length() <= 100 ? statement : statement.substring(0, 97) + "...");
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
success = false;
|
||||
errorMessage = e.getMessage();
|
||||
|
||||
long elapsed = System.currentTimeMillis() - startTime;
|
||||
recordMigration(SchemaMigrationRecord.builder()
|
||||
.version(migration.getVersion())
|
||||
.description(migration.getDescription())
|
||||
.checksum(computeChecksum(migration.getStatements()))
|
||||
.appliedAt(Instant.now().toString())
|
||||
.executionTimeMs(elapsed)
|
||||
.success(false)
|
||||
.statementsCount(migration.getStatements().size())
|
||||
.errorMessage(errorMessage)
|
||||
.build());
|
||||
|
||||
throw BusinessException.of(
|
||||
KnowledgeGraphErrorCode.SCHEMA_MIGRATION_FAILED,
|
||||
String.format("Migration V%d (%s) failed: %s",
|
||||
migration.getVersion(), migration.getDescription(), errorMessage)
|
||||
);
|
||||
}
|
||||
|
||||
long elapsed = System.currentTimeMillis() - startTime;
|
||||
recordMigration(SchemaMigrationRecord.builder()
|
||||
.version(migration.getVersion())
|
||||
.description(migration.getDescription())
|
||||
.checksum(computeChecksum(migration.getStatements()))
|
||||
.appliedAt(Instant.now().toString())
|
||||
.executionTimeMs(elapsed)
|
||||
.success(true)
|
||||
.statementsCount(migration.getStatements().size())
|
||||
.build());
|
||||
|
||||
log.info("Migration V{} completed in {}ms", migration.getVersion(), elapsed);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 写入迁移记录节点。
|
||||
* <p>
|
||||
* 使用 MERGE(按 version 匹配)+ SET 而非 CREATE,确保:
|
||||
* <ul>
|
||||
* <li>失败后重试不会因唯一约束冲突而卡死(P0)</li>
|
||||
* <li>迁移执行成功但记录写入失败后,重跑可安全补写记录(幂等性)</li>
|
||||
* </ul>
|
||||
*/
|
||||
void recordMigration(SchemaMigrationRecord record) {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("version", record.getVersion());
|
||||
params.put("description", nullToEmpty(record.getDescription()));
|
||||
params.put("checksum", nullToEmpty(record.getChecksum()));
|
||||
params.put("appliedAt", nullToEmpty(record.getAppliedAt()));
|
||||
params.put("executionTimeMs", record.getExecutionTimeMs());
|
||||
params.put("success", record.isSuccess());
|
||||
params.put("statementsCount", record.getStatementsCount());
|
||||
params.put("errorMessage", nullToEmpty(record.getErrorMessage()));
|
||||
|
||||
neo4jClient.query(
|
||||
"MERGE (m:_SchemaMigration {version: $version}) " +
|
||||
"SET m.description = $description, " +
|
||||
" m.checksum = $checksum, " +
|
||||
" m.applied_at = $appliedAt, " +
|
||||
" m.execution_time_ms = $executionTimeMs, " +
|
||||
" m.success = $success, " +
|
||||
" m.statements_count = $statementsCount, " +
|
||||
" m.error_message = $errorMessage"
|
||||
).bindAll(params).run();
|
||||
}
|
||||
|
||||
/**
|
||||
* 计算语句列表的 SHA-256 校验和。
|
||||
*/
|
||||
static String computeChecksum(List<String> statements) {
|
||||
try {
|
||||
MessageDigest digest = MessageDigest.getInstance("SHA-256");
|
||||
for (String statement : statements) {
|
||||
digest.update(statement.getBytes(StandardCharsets.UTF_8));
|
||||
}
|
||||
byte[] hash = digest.digest();
|
||||
StringBuilder hex = new StringBuilder();
|
||||
for (byte b : hash) {
|
||||
hex.append(String.format("%02x", b));
|
||||
}
|
||||
return hex.toString();
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new IllegalStateException("SHA-256 algorithm not available", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 判断异常是否仅因为 Schema 元素已存在(安全可忽略)。
|
||||
*/
|
||||
static boolean isAlreadyExistsError(Exception e) {
|
||||
String msg = e.getMessage();
|
||||
if (msg == null) {
|
||||
return false;
|
||||
}
|
||||
String lowerMsg = msg.toLowerCase();
|
||||
return ALREADY_EXISTS_KEYWORDS.stream().anyMatch(kw -> lowerMsg.contains(kw.toLowerCase()));
|
||||
}
|
||||
|
||||
/**
|
||||
* 将 null 字符串转换为空字符串,避免 Neo4j 驱动 bindAll 传入 null 值导致属性缺失。
|
||||
*/
|
||||
private static String nullToEmpty(String value) {
|
||||
return value != null ? value : "";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,66 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;
|
||||
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* V1 基线迁移:初始 Schema。
|
||||
* <p>
|
||||
* 包含 {@code GraphInitializer} 中原有的全部 14 条 DDL 语句。
|
||||
* 在已有数据库上首次运行时,所有语句因 {@code IF NOT EXISTS} 而为 no-op,
|
||||
* 但会建立版本基线。
|
||||
*/
|
||||
@Component
|
||||
public class V1__InitialSchema implements SchemaMigration {
|
||||
|
||||
@Override
|
||||
public int getVersion() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "Initial schema: Entity and SyncHistory constraints and indexes";
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getStatements() {
|
||||
return List.of(
|
||||
// 约束(自动创建对应索引)
|
||||
"CREATE CONSTRAINT entity_id_unique IF NOT EXISTS FOR (n:Entity) REQUIRE n.id IS UNIQUE",
|
||||
|
||||
// 同步 upsert 复合唯一约束:防止并发写入产生重复实体
|
||||
"CREATE CONSTRAINT entity_sync_unique IF NOT EXISTS " +
|
||||
"FOR (n:Entity) REQUIRE (n.graph_id, n.source_id, n.type) IS UNIQUE",
|
||||
|
||||
// 单字段索引
|
||||
"CREATE INDEX entity_graph_id IF NOT EXISTS FOR (n:Entity) ON (n.graph_id)",
|
||||
"CREATE INDEX entity_type IF NOT EXISTS FOR (n:Entity) ON (n.type)",
|
||||
"CREATE INDEX entity_name IF NOT EXISTS FOR (n:Entity) ON (n.name)",
|
||||
"CREATE INDEX entity_source_id IF NOT EXISTS FOR (n:Entity) ON (n.source_id)",
|
||||
"CREATE INDEX entity_created_at IF NOT EXISTS FOR (n:Entity) ON (n.created_at)",
|
||||
|
||||
// 复合索引
|
||||
"CREATE INDEX entity_graph_id_type IF NOT EXISTS FOR (n:Entity) ON (n.graph_id, n.type)",
|
||||
"CREATE INDEX entity_graph_id_id IF NOT EXISTS FOR (n:Entity) ON (n.graph_id, n.id)",
|
||||
"CREATE INDEX entity_graph_id_source_id IF NOT EXISTS FOR (n:Entity) ON (n.graph_id, n.source_id)",
|
||||
|
||||
// 全文索引
|
||||
"CREATE FULLTEXT INDEX entity_fulltext IF NOT EXISTS FOR (n:Entity) ON EACH [n.name, n.description]",
|
||||
|
||||
// ── SyncHistory 约束和索引 ──
|
||||
|
||||
// syncId 唯一约束,防止 ID 碰撞
|
||||
"CREATE CONSTRAINT sync_history_graph_sync_unique IF NOT EXISTS " +
|
||||
"FOR (h:SyncHistory) REQUIRE (h.graph_id, h.sync_id) IS UNIQUE",
|
||||
|
||||
// 查询优化索引
|
||||
"CREATE INDEX sync_history_graph_started IF NOT EXISTS " +
|
||||
"FOR (h:SyncHistory) ON (h.graph_id, h.started_at)",
|
||||
|
||||
"CREATE INDEX sync_history_graph_status_started IF NOT EXISTS " +
|
||||
"FOR (h:SyncHistory) ON (h.graph_id, h.status, h.started_at)"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,51 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.neo4j.migration;
|
||||
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* V2 性能优化迁移:关系索引和属性索引。
|
||||
* <p>
|
||||
* V1 仅对 Entity 节点创建了索引。该迁移补充:
|
||||
* <ul>
|
||||
* <li>RELATED_TO 关系的 graph_id 索引(加速子图查询中的关系过滤)</li>
|
||||
* <li>RELATED_TO 关系的 relation_type 索引(加速按类型筛选)</li>
|
||||
* <li>Entity 的 (graph_id, name) 复合索引(加速 name 过滤查询)</li>
|
||||
* <li>Entity 的 updated_at 索引(加速增量同步范围查询)</li>
|
||||
* <li>RELATED_TO 关系的 (graph_id, relation_type) 复合索引</li>
|
||||
* </ul>
|
||||
*/
|
||||
@Component
|
||||
public class V2__PerformanceIndexes implements SchemaMigration {
|
||||
|
||||
@Override
|
||||
public int getVersion() {
|
||||
return 2;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "Performance indexes: relationship indexes and additional composite indexes";
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getStatements() {
|
||||
return List.of(
|
||||
// 关系索引:加速子图查询中 WHERE r.graph_id = $graphId 的过滤
|
||||
"CREATE INDEX rel_graph_id IF NOT EXISTS FOR ()-[r:RELATED_TO]-() ON (r.graph_id)",
|
||||
|
||||
// 关系索引:加速按关系类型筛选
|
||||
"CREATE INDEX rel_relation_type IF NOT EXISTS FOR ()-[r:RELATED_TO]-() ON (r.relation_type)",
|
||||
|
||||
// 关系复合索引:加速同一图谱内按类型查询关系
|
||||
"CREATE INDEX rel_graph_id_type IF NOT EXISTS FOR ()-[r:RELATED_TO]-() ON (r.graph_id, r.relation_type)",
|
||||
|
||||
// 节点复合索引:加速 graph_id + name 过滤查询
|
||||
"CREATE INDEX entity_graph_id_name IF NOT EXISTS FOR (n:Entity) ON (n.graph_id, n.name)",
|
||||
|
||||
// 节点索引:加速增量同步中的时间范围查询
|
||||
"CREATE INDEX entity_updated_at IF NOT EXISTS FOR (n:Entity) ON (n.updated_at)"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.security;
|
||||
|
||||
import com.datamate.common.infrastructure.common.Response;
|
||||
import com.datamate.knowledgegraph.infrastructure.exception.KnowledgeGraphErrorCode;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import jakarta.servlet.http.HttpServletRequest;
|
||||
import jakarta.servlet.http.HttpServletResponse;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.http.MediaType;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.springframework.web.servlet.HandlerInterceptor;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* 内部服务调用 Token 校验拦截器。
|
||||
* <p>
|
||||
* 验证 {@code X-Internal-Token} 请求头,保护 sync 端点仅供内部服务/定时任务调用。
|
||||
* <p>
|
||||
* <strong>安全策略(fail-closed)</strong>:
|
||||
* <ul>
|
||||
* <li>Token 未配置且 {@code skip-token-check=false}(默认)时,直接拒绝请求</li>
|
||||
* <li>仅当 dev/test 环境显式设置 {@code skip-token-check=true} 时,才跳过校验</li>
|
||||
* </ul>
|
||||
*/
|
||||
@Component
|
||||
@RequiredArgsConstructor
|
||||
public class InternalTokenInterceptor implements HandlerInterceptor {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(InternalTokenInterceptor.class);
|
||||
private static final String HEADER_INTERNAL_TOKEN = "X-Internal-Token";
|
||||
|
||||
private final KnowledgeGraphProperties properties;
|
||||
private final ObjectMapper objectMapper;
|
||||
|
||||
@Override
|
||||
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
|
||||
throws IOException {
|
||||
KnowledgeGraphProperties.Security security = properties.getSecurity();
|
||||
String configuredToken = security.getInternalToken();
|
||||
|
||||
if (!StringUtils.hasText(configuredToken)) {
|
||||
if (security.isSkipTokenCheck()) {
|
||||
log.warn("内部调用 Token 未配置且 skip-token-check=true,跳过校验(仅限 dev/test 环境)。");
|
||||
return true;
|
||||
}
|
||||
log.error("内部调用 Token 未配置且 skip-token-check=false(fail-closed),拒绝请求。"
|
||||
+ "请设置 KG_INTERNAL_TOKEN 环境变量或在 dev/test 环境启用 skip-token-check。");
|
||||
writeErrorResponse(response);
|
||||
return false;
|
||||
}
|
||||
|
||||
String requestToken = request.getHeader(HEADER_INTERNAL_TOKEN);
|
||||
|
||||
if (!configuredToken.equals(requestToken)) {
|
||||
writeErrorResponse(response);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private void writeErrorResponse(HttpServletResponse response) throws IOException {
|
||||
Response<?> errorBody = Response.error(KnowledgeGraphErrorCode.UNAUTHORIZED_INTERNAL_CALL);
|
||||
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
|
||||
response.setContentType(MediaType.APPLICATION_JSON_VALUE);
|
||||
response.setCharacterEncoding("UTF-8");
|
||||
response.getWriter().write(objectMapper.writeValueAsString(errorBody));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
package com.datamate.knowledgegraph.infrastructure.security;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
|
||||
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
|
||||
|
||||
/**
 * Registers {@link InternalTokenInterceptor}; intercepts only the sync endpoints.
 */
@Configuration
@RequiredArgsConstructor
public class InternalTokenWebMvcConfigurer implements WebMvcConfigurer {

    private final InternalTokenInterceptor internalTokenInterceptor;

    /** Applies the token interceptor to the graph sync path pattern only. */
    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(internalTokenInterceptor)
                .addPathPatterns("/knowledge-graph/*/sync/**");
    }
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Result of an all-paths query.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class AllPathsVO {

    /** All found paths (ascending by path length). */
    private List<PathVO> paths;

    /** Total number of paths. */
    private int pathCount;
}
|
||||
@@ -0,0 +1,18 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Batch delete request.
 */
@Data
public class BatchDeleteRequest {

    // IDs to delete; must be non-empty and capped at 100 per request
    @NotEmpty(message = "ID 列表不能为空")
    @Size(max = 100, message = "单次批量删除最多 100 条")
    private List<String> ids;
}
|
||||
@@ -0,0 +1,31 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Request payload for creating a single entity in a knowledge graph.
 */
@Data
public class CreateEntityRequest {

    // Display name of the entity; must be non-blank
    @NotBlank(message = "实体名称不能为空")
    private String name;

    // Entity type/category; must be non-blank
    @NotBlank(message = "实体类型不能为空")
    private String type;

    // Optional free-text description
    private String description;

    // Alternative names; defaults to an empty mutable list
    private List<String> aliases = new ArrayList<>();

    // Arbitrary additional properties; defaults to an empty mutable map
    private Map<String, Object> properties = new HashMap<>();

    // ID of the originating source record — presumably dataset/KB ID; verify against callers
    private String sourceId;

    // Type discriminator of the source — TODO confirm allowed values with callers
    private String sourceType;

    // Confidence score — likely expected in [0.0, 1.0] (cf. CreateRelationRequest); not validated here
    private Double confidence;
}
|
||||
@@ -0,0 +1,42 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import jakarta.validation.constraints.DecimalMax;
|
||||
import jakarta.validation.constraints.DecimalMin;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Request payload for creating a relation between two existing entities.
 */
@Data
public class CreateRelationRequest {

    // Canonical UUID format accepted for entity IDs
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    // Source entity UUID; required and format-checked
    @NotBlank(message = "源实体ID不能为空")
    @Pattern(regexp = UUID_REGEX, message = "源实体ID格式无效")
    private String sourceEntityId;

    // Target entity UUID; required and format-checked
    @NotBlank(message = "目标实体ID不能为空")
    @Pattern(regexp = UUID_REGEX, message = "目标实体ID格式无效")
    private String targetEntityId;

    // Relation type label; required, 1-50 characters
    @NotBlank(message = "关系类型不能为空")
    @Size(min = 1, max = 50, message = "关系类型长度必须在1-50之间")
    private String relationType;

    // Arbitrary additional properties; defaults to an empty mutable map
    private Map<String, Object> properties = new HashMap<>();

    // Optional edge weight, validated to [0.0, 1.0]
    @DecimalMin(value = "0.0", message = "权重必须在0.0-1.0之间")
    @DecimalMax(value = "1.0", message = "权重必须在0.0-1.0之间")
    private Double weight;

    // ID of the originating source record — presumably dataset/KB ID; verify against callers
    private String sourceId;

    // Optional confidence score, validated to [0.0, 1.0]
    @DecimalMin(value = "0.0", message = "置信度必须在0.0-1.0之间")
    @DecimalMax(value = "1.0", message = "置信度必须在0.0-1.0之间")
    private Double confidence;
}
|
||||
@@ -0,0 +1,22 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
 * Relation summary; represents an edge in graph-traversal results.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EdgeSummaryVO {

    // Relation ID
    private String id;
    // Source entity ID
    private String sourceEntityId;
    // Target entity ID
    private String targetEntityId;
    // Relation type label
    private String relationType;
    // Edge weight (nullable)
    private Double weight;
}
|
||||
@@ -0,0 +1,31 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
/**
 * Edit-review record view object.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EditReviewVO {

    // Review record ID
    private String id;
    // Graph the edit targets
    private String graphId;
    // Edit operation type — presumably matches SubmitReviewRequest's operation enum; verify
    private String operationType;
    // Affected entity ID (nullable for relation-only edits)
    private String entityId;
    // Affected relation ID (nullable for entity-only edits)
    private String relationId;
    // Serialized edit payload — format not shown here; TODO confirm (likely JSON)
    private String payload;
    // Review status — allowed values not visible here; verify against domain model
    private String status;
    // User who submitted the edit
    private String submittedBy;
    // Reviewer who processed the edit (nullable until reviewed)
    private String reviewedBy;
    // Reviewer's comment (nullable)
    private String reviewComment;
    // Submission timestamp
    private LocalDateTime createdAt;
    // Review timestamp (nullable until reviewed)
    private LocalDateTime reviewedAt;
}
|
||||
@@ -0,0 +1,21 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
 * Entity summary; represents a node in graph-traversal results.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EntitySummaryVO {

    // Entity ID
    private String id;
    // Entity display name
    private String name;
    // Entity type label
    private String type;
    // Free-text description (nullable)
    private String description;
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
 * Relation edge for export, carrying the full set of properties.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ExportEdgeVO {

    // Relation ID
    private String id;
    // Source entity ID
    private String sourceEntityId;
    // Target entity ID
    private String targetEntityId;
    // Relation type label
    private String relationType;
    // Edge weight (nullable)
    private Double weight;
    // Confidence score (nullable)
    private Double confidence;
    // Originating source record ID (nullable)
    private String sourceId;
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Node for export, carrying the full set of properties.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ExportNodeVO {

    // Entity ID
    private String id;
    // Entity display name
    private String name;
    // Entity type label
    private String type;
    // Free-text description (nullable)
    private String description;
    // Arbitrary additional properties of the entity
    private Map<String, Object> properties;
}
|
||||
@@ -0,0 +1,27 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Result of a shortest-path query.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class PathVO {

    /** Nodes along the path, in traversal order. */
    private List<EntitySummaryVO> nodes;

    /** Edges along the path, in traversal order. */
    private List<EdgeSummaryVO> edges;

    /** Path length in hops. */
    private int pathLength;
}
|
||||
@@ -0,0 +1,53 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Relation query result view object.
 * <p>
 * Carries the complete relation information, including summaries of the
 * source and target entities, for REST API responses.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class RelationVO {

    // Relation ID
    private String id;

    // Source entity ID
    private String sourceEntityId;

    // Source entity display name
    private String sourceEntityName;

    // Source entity type label
    private String sourceEntityType;

    // Target entity ID
    private String targetEntityId;

    // Target entity display name
    private String targetEntityName;

    // Target entity type label
    private String targetEntityType;

    // Relation type label
    private String relationType;

    // Arbitrary additional properties; builder defaults to an empty map
    @Builder.Default
    private Map<String, Object> properties = new HashMap<>();

    // Edge weight (nullable)
    private Double weight;

    // Confidence score (nullable)
    private Double confidence;

    /** ID of the originating dataset / knowledge base. */
    private String sourceId;

    // Graph the relation belongs to
    private String graphId;

    // Creation timestamp
    private LocalDateTime createdAt;
}
|
||||
@@ -0,0 +1,13 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
/**
 * Request to approve or reject a review.
 */
@Data
public class ReviewActionRequest {

    /** Reviewer's comment (optional). */
    private String comment;
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
 * Full-text search hit, including the relevance score.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SearchHitVO {

    // Entity ID
    private String id;
    // Entity display name
    private String name;
    // Entity type label
    private String type;
    // Free-text description (nullable)
    private String description;

    /** Full-text search relevance score (higher is more relevant). */
    private double score;
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Result of a subgraph export.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SubgraphExportVO {

    /** Nodes in the subgraph (with full properties). */
    private List<ExportNodeVO> nodes;

    /** Edges in the subgraph. */
    private List<ExportEdgeVO> edges;

    /** Number of nodes. */
    private int nodeCount;

    /** Number of edges. */
    private int edgeCount;
}
|
||||
@@ -0,0 +1,26 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;

import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.Pattern;
import jakarta.validation.constraints.Size;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * Request body for subgraph query/export endpoints.
 * <p>
 * Carries the set of entity IDs whose induced subgraph should be returned.
 * Each ID must be a canonical UUID; the list is capped at 500 entries.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class SubgraphRequest {

    // Canonical 8-4-4-4-12 hex UUID format, case-insensitive.
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    // Element-level @Pattern validates each ID in the list individually.
    @NotEmpty(message = "实体 ID 列表不能为空")
    @Size(max = 500, message = "实体数量超出限制(最大 500)")
    private List<@Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String> entityIds;
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * Result of a subgraph query.
 * <p>
 * Nodes/edges are summary views (see {@code EntitySummaryVO}/{@code EdgeSummaryVO});
 * for full attributes use the export variant, {@link SubgraphExportVO}.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SubgraphVO {

    /** Nodes contained in the subgraph. */
    private List<EntitySummaryVO> nodes;

    /** Edges contained in the subgraph. */
    private List<EdgeSummaryVO> edges;

    /** Number of nodes. */
    private int nodeCount;

    /** Number of edges. */
    private int edgeCount;
}
|
||||
@@ -0,0 +1,65 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;

import jakarta.validation.constraints.AssertTrue;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Pattern;
import lombok.Data;

/**
 * Request body for submitting a graph edit for review.
 * <p>
 * Cross-field constraints are enforced by the private {@code @AssertTrue}
 * validators below:
 * <ul>
 *   <li>UPDATE/DELETE entity operations require {@code entityId};</li>
 *   <li>UPDATE/DELETE relation operations require {@code relationId};</li>
 *   <li>CREATE/UPDATE/BATCH_DELETE operations require {@code payload}
 *       (for batch operations the target IDs travel inside the payload).</li>
 * </ul>
 */
@Data
public class SubmitReviewRequest {

    // NOTE(review): this constant is not referenced anywhere in this class
    // (entityId/relationId carry no @Pattern) — confirm whether UUID
    // validation of the target IDs was intended here.
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    /**
     * Operation type: CREATE_ENTITY, UPDATE_ENTITY, DELETE_ENTITY,
     * CREATE_RELATION, UPDATE_RELATION, DELETE_RELATION,
     * BATCH_DELETE_ENTITY, BATCH_DELETE_RELATION.
     */
    @NotBlank(message = "操作类型不能为空")
    @Pattern(regexp = "^(CREATE|UPDATE|DELETE|BATCH_DELETE)_(ENTITY|RELATION)$",
            message = "操作类型无效")
    private String operationType;

    /** Target entity ID (required for single-entity UPDATE/DELETE operations). */
    private String entityId;

    /** Target relation ID (required for single-relation UPDATE/DELETE operations). */
    private String relationId;

    /** Change payload (JSON-formatted request body). */
    private String payload;

    // Only UPDATE_ENTITY / DELETE_ENTITY reach the inner check: CREATE has no
    // target yet and BATCH_* carries its targets in the payload.
    @AssertTrue(message = "UPDATE/DELETE 实体操作必须提供 entityId")
    private boolean isEntityIdValid() {
        if (operationType == null) return true;
        if (operationType.endsWith("_ENTITY") && !operationType.startsWith("CREATE")
                && !operationType.startsWith("BATCH")) {
            return entityId != null && !entityId.isBlank();
        }
        return true;
    }

    // Mirror of isEntityIdValid() for relation operations.
    @AssertTrue(message = "UPDATE/DELETE 关系操作必须提供 relationId")
    private boolean isRelationIdValid() {
        if (operationType == null) return true;
        if (operationType.endsWith("_RELATION") && !operationType.startsWith("CREATE")
                && !operationType.startsWith("BATCH")) {
            return relationId != null && !relationId.isBlank();
        }
        return true;
    }

    // payload carries the new/changed data for CREATE and UPDATE, and the
    // target ID list for BATCH_DELETE; plain DELETE needs no payload.
    @AssertTrue(message = "CREATE/UPDATE/BATCH_DELETE 操作必须提供 payload")
    private boolean isPayloadValid() {
        if (operationType == null) return true;
        if (operationType.startsWith("CREATE") || operationType.startsWith("UPDATE")
                || operationType.startsWith("BATCH_DELETE")) {
            return payload != null && !payload.isBlank();
        }
        return true;
    }
}
|
||||
@@ -0,0 +1,75 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;

import com.datamate.knowledgegraph.domain.model.SyncMetadata;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.time.LocalDateTime;
import java.util.List;

/**
 * View object for sync metadata.
 * <p>
 * Carries overall statistics for one sync run plus (optionally) the
 * detailed per-step results.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SyncMetadataVO {

    private String syncId;
    private String graphId;
    private String syncType;
    private String status;
    private LocalDateTime startedAt;
    private LocalDateTime completedAt;
    private long durationMillis;
    private int totalCreated;
    private int totalUpdated;
    private int totalSkipped;
    private int totalFailed;
    private int totalPurged;
    private int totalEntities;
    private LocalDateTime updatedFrom;
    private LocalDateTime updatedTo;
    private String errorMessage;
    private List<String> stepSummaries;
    /** Detailed per-step results (present only on the response of the triggering sync; null for history queries). */
    private List<SyncResultVO> results;

    /**
     * Builds a VO from a {@link SyncMetadata} domain object, including the
     * detailed step results (each mapped via {@link SyncResultVO#from}).
     */
    public static SyncMetadataVO from(SyncMetadata metadata) {
        // results may legitimately be absent (e.g. history queries) — keep null rather than an empty list.
        List<SyncResultVO> resultVOs = null;
        if (metadata.getResults() != null) {
            resultVOs = metadata.getResults().stream()
                    .map(SyncResultVO::from)
                    .toList();
        }

        return SyncMetadataVO.builder()
                .syncId(metadata.getSyncId())
                .graphId(metadata.getGraphId())
                .syncType(metadata.getSyncType())
                .status(metadata.getStatus())
                .startedAt(metadata.getStartedAt())
                .completedAt(metadata.getCompletedAt())
                .durationMillis(metadata.getDurationMillis())
                .totalCreated(metadata.getTotalCreated())
                .totalUpdated(metadata.getTotalUpdated())
                .totalSkipped(metadata.getTotalSkipped())
                .totalFailed(metadata.getTotalFailed())
                .totalPurged(metadata.getTotalPurged())
                // note: computed accessor, not a plain getter
                .totalEntities(metadata.totalEntities())
                .updatedFrom(metadata.getUpdatedFrom())
                .updatedTo(metadata.getUpdatedTo())
                .errorMessage(metadata.getErrorMessage())
                .stepSummaries(metadata.getStepSummaries())
                .results(resultVOs)
                .build();
    }
}
|
||||
@@ -0,0 +1,56 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;

import com.datamate.knowledgegraph.domain.model.SyncResult;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.time.LocalDateTime;

/**
 * View object for a single sync step result.
 * <p>
 * Deliberately does not expose the internal error details (the domain
 * object's {@code errors} list); only an error count and the syncId are
 * returned, so clients can ask operations for the corresponding logs.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SyncResultVO {

    private String syncId;
    private String syncType;
    private int created;
    private int updated;
    private int skipped;
    private int failed;
    private int purged;
    private int total;
    private long durationMillis;
    /** Marks a placeholder step (feature not yet implemented). */
    private boolean placeholder;
    /** Number of errors (specific error details are intentionally withheld). */
    private int errorCount;
    private LocalDateTime startedAt;
    private LocalDateTime completedAt;

    /** Maps a {@link SyncResult} domain object to this VO, collapsing the error list to a count. */
    public static SyncResultVO from(SyncResult result) {
        return SyncResultVO.builder()
                .syncId(result.getSyncId())
                .syncType(result.getSyncType())
                .created(result.getCreated())
                .updated(result.getUpdated())
                .skipped(result.getSkipped())
                .failed(result.getFailed())
                .purged(result.getPurged())
                // total() and durationMillis() are computed accessors, not plain getters
                .total(result.total())
                .durationMillis(result.durationMillis())
                .placeholder(result.isPlaceholder())
                .errorCount(result.getErrors() != null ? result.getErrors().size() : 0)
                .startedAt(result.getStartedAt())
                .completedAt(result.getCompletedAt())
                .build();
    }
}
|
||||
@@ -0,0 +1,20 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;

import lombok.Data;

import java.util.List;
import java.util.Map;

/**
 * Entity update request.
 * <p>
 * All fields are optional; only fields with a value are applied
 * (patch semantics, mirroring {@link UpdateRelationRequest}).
 * <p>
 * NOTE(review): unlike {@link UpdateRelationRequest}, {@code confidence}
 * here carries no 0.0–1.0 range validation — confirm whether that is
 * intentional or an omission.
 */
@Data
public class UpdateEntityRequest {

    /** New display name. */
    private String name;

    /** New description text. */
    private String description;

    /** Replacement alias list. */
    private List<String> aliases;

    /** Replacement/patched property map. */
    private Map<String, Object> properties;

    /** New confidence value. */
    private Double confidence;
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package com.datamate.knowledgegraph.interfaces.dto;

import jakarta.validation.constraints.DecimalMax;
import jakarta.validation.constraints.DecimalMin;
import jakarta.validation.constraints.Size;
import lombok.Data;

import java.util.Map;

/**
 * Relation update request.
 * <p>
 * All fields are optional; only fields with a value are applied
 * (patch semantics).
 */
@Data
public class UpdateRelationRequest {

    /** New relation type (1–50 characters when provided). */
    @Size(min = 1, max = 50, message = "关系类型长度必须在1-50之间")
    private String relationType;

    /** Replacement/patched property map. */
    private Map<String, Object> properties;

    /** New weight; must lie in [0.0, 1.0] when provided. */
    @DecimalMin(value = "0.0", message = "权重必须在0.0-1.0之间")
    @DecimalMax(value = "1.0", message = "权重必须在0.0-1.0之间")
    private Double weight;

    /** New confidence; must lie in [0.0, 1.0] when provided. */
    @DecimalMin(value = "0.0", message = "置信度必须在0.0-1.0之间")
    @DecimalMax(value = "1.0", message = "置信度必须在0.0-1.0之间")
    private Double confidence;
}
|
||||
@@ -0,0 +1,71 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;

import com.datamate.common.interfaces.PagedResponse;
import com.datamate.knowledgegraph.application.EditReviewService;
import com.datamate.knowledgegraph.interfaces.dto.EditReviewVO;
import com.datamate.knowledgegraph.interfaces.dto.ReviewActionRequest;
import com.datamate.knowledgegraph.interfaces.dto.SubmitReviewRequest;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Pattern;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;

/**
 * REST endpoints for the graph edit review workflow: submit, approve,
 * reject, and list reviews. All business logic is delegated to
 * {@link EditReviewService}; the acting user is taken from the
 * {@code X-User-Id} header (defaulting to "anonymous").
 */
@RestController
@RequestMapping("/knowledge-graph/{graphId}/review")
@RequiredArgsConstructor
@Validated
public class EditReviewController {

    // Canonical 8-4-4-4-12 hex UUID format for path-variable validation.
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    private final EditReviewService reviewService;

    /** Submits a new edit for review; returns 201 with the created review. */
    @PostMapping("/submit")
    @ResponseStatus(HttpStatus.CREATED)
    public EditReviewVO submitReview(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @Valid @RequestBody SubmitReviewRequest request,
            @RequestHeader(value = "X-User-Id", defaultValue = "anonymous") String userId) {
        return reviewService.submitReview(graphId, request, userId);
    }

    /** Approves a pending review; the request body (reviewer comment) is optional. */
    @PostMapping("/{reviewId}/approve")
    public EditReviewVO approveReview(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "reviewId 格式无效") String reviewId,
            @RequestBody(required = false) ReviewActionRequest request,
            @RequestHeader(value = "X-User-Id", defaultValue = "anonymous") String userId) {
        // Body may be absent entirely — treat a missing body as "no comment".
        String comment = (request != null) ? request.getComment() : null;
        return reviewService.approveReview(graphId, reviewId, userId, comment);
    }

    /** Rejects a pending review; the request body (reviewer comment) is optional. */
    @PostMapping("/{reviewId}/reject")
    public EditReviewVO rejectReview(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "reviewId 格式无效") String reviewId,
            @RequestBody(required = false) ReviewActionRequest request,
            @RequestHeader(value = "X-User-Id", defaultValue = "anonymous") String userId) {
        String comment = (request != null) ? request.getComment() : null;
        return reviewService.rejectReview(graphId, reviewId, userId, comment);
    }

    /** Lists pending reviews, paged. */
    @GetMapping("/pending")
    public PagedResponse<EditReviewVO> listPendingReviews(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam(defaultValue = "0") int page,
            @RequestParam(defaultValue = "20") int size) {
        return reviewService.listPendingReviews(graphId, page, size);
    }

    /** Lists reviews, optionally filtered by status, paged. */
    @GetMapping
    public PagedResponse<EditReviewVO> listReviews(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam(required = false) String status,
            @RequestParam(defaultValue = "0") int page,
            @RequestParam(defaultValue = "20") int size) {
        return reviewService.listReviews(graphId, status, page, size);
    }
}
|
||||
@@ -0,0 +1,123 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;

import com.datamate.common.interfaces.PagedResponse;
import com.datamate.knowledgegraph.application.GraphEntityService;
import com.datamate.knowledgegraph.application.GraphRelationService;
import com.datamate.knowledgegraph.domain.model.GraphEntity;
import com.datamate.knowledgegraph.interfaces.dto.CreateEntityRequest;
import com.datamate.knowledgegraph.interfaces.dto.RelationVO;
import com.datamate.knowledgegraph.interfaces.dto.UpdateEntityRequest;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Pattern;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;

import java.util.List;

/**
 * CRUD and traversal endpoints for graph entities. Delegates to
 * {@link GraphEntityService} (entity operations) and
 * {@link GraphRelationService} (per-entity relation listing).
 * <p>
 * Listing is split into two endpoints on the same path, discriminated by
 * the presence of the {@code page} request parameter (see
 * {@link #listEntities} vs {@link #listEntitiesPaged}).
 */
@RestController
@RequestMapping("/knowledge-graph/{graphId}/entities")
@RequiredArgsConstructor
@Validated
public class GraphEntityController {

    // Canonical 8-4-4-4-12 hex UUID format for path-variable validation.
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    private final GraphEntityService entityService;
    private final GraphRelationService relationService;

    /** Creates an entity; returns 201 with the created entity. */
    @PostMapping
    @ResponseStatus(HttpStatus.CREATED)
    public GraphEntity createEntity(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @Valid @RequestBody CreateEntityRequest request) {
        return entityService.createEntity(graphId, request);
    }

    /** Fetches a single entity by ID. */
    @GetMapping("/{entityId}")
    public GraphEntity getEntity(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId) {
        return entityService.getEntity(graphId, entityId);
    }

    /**
     * Lists entities without pagination (backward-compatible variant).
     * <p>
     * Matches only requests that do NOT carry a {@code page} parameter and
     * returns a plain {@code List}. Pass {@code page} to be routed to the
     * paged endpoint instead. {@code keyword} takes precedence over
     * {@code type} when both are supplied.
     */
    @GetMapping(params = "!page")
    public List<GraphEntity> listEntities(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam(required = false) String type,
            @RequestParam(required = false) String keyword) {
        if (keyword != null && !keyword.isBlank()) {
            return entityService.searchEntities(graphId, keyword);
        }
        if (type != null && !type.isBlank()) {
            return entityService.listEntitiesByType(graphId, type);
        }
        return entityService.listEntities(graphId);
    }

    /**
     * Lists entities with pagination.
     * <p>
     * Matches only requests that carry a {@code page} parameter and returns
     * a {@code PagedResponse}. Same filter precedence as the unpaged variant:
     * {@code keyword} first, then {@code type}, then no filter.
     */
    @GetMapping(params = "page")
    public PagedResponse<GraphEntity> listEntitiesPaged(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam(required = false) String type,
            @RequestParam(required = false) String keyword,
            @RequestParam(defaultValue = "0") int page,
            @RequestParam(defaultValue = "20") int size) {
        if (keyword != null && !keyword.isBlank()) {
            return entityService.searchEntitiesPaged(graphId, keyword, page, size);
        }
        if (type != null && !type.isBlank()) {
            return entityService.listEntitiesByTypePaged(graphId, type, page, size);
        }
        return entityService.listEntitiesPaged(graphId, page, size);
    }

    /** Updates an entity (patch semantics — see {@link UpdateEntityRequest}). */
    @PutMapping("/{entityId}")
    public GraphEntity updateEntity(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId,
            @Valid @RequestBody UpdateEntityRequest request) {
        return entityService.updateEntity(graphId, entityId, request);
    }

    /** Deletes an entity; returns 204 on success. */
    @DeleteMapping("/{entityId}")
    @ResponseStatus(HttpStatus.NO_CONTENT)
    public void deleteEntity(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId) {
        entityService.deleteEntity(graphId, entityId);
    }

    /** Lists the relations attached to an entity, filtered by direction/type, paged. */
    @GetMapping("/{entityId}/relations")
    public PagedResponse<RelationVO> listEntityRelations(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId,
            @RequestParam(defaultValue = "all") @Pattern(regexp = "^(all|in|out)$", message = "direction 参数无效,允许值:all, in, out") String direction,
            @RequestParam(required = false) String type,
            @RequestParam(defaultValue = "0") int page,
            @RequestParam(defaultValue = "20") int size) {
        return relationService.listEntityRelations(graphId, entityId, direction, type, page, size);
    }

    /**
     * Returns neighbor entities up to {@code depth} hops away, capped at
     * {@code limit} results.
     * <p>
     * NOTE(review): {@code depth}/{@code limit} are not range-validated here,
     * unlike other controllers in this module — confirm whether bounds should
     * be enforced.
     */
    @GetMapping("/{entityId}/neighbors")
    public List<GraphEntity> getNeighbors(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId,
            @RequestParam(defaultValue = "2") int depth,
            @RequestParam(defaultValue = "50") int limit) {
        return entityService.getNeighbors(graphId, entityId, depth, limit);
    }

}
|
||||
@@ -0,0 +1,126 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;

import com.datamate.common.interfaces.PagedResponse;
import com.datamate.knowledgegraph.application.GraphQueryService;
import com.datamate.knowledgegraph.interfaces.dto.*;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Pattern;
import lombok.RequiredArgsConstructor;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;

/**
 * Knowledge-graph query endpoints.
 * <p>
 * Provides graph traversal (neighbors, shortest path, all paths, subgraph,
 * subgraph export) and full-text search. All logic is delegated to
 * {@link GraphQueryService}.
 */
@RestController
@RequestMapping("/knowledge-graph/{graphId}/query")
@RequiredArgsConstructor
@Validated
public class GraphQueryController {

    // Canonical 8-4-4-4-12 hex UUID format for path/query parameter validation.
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    private final GraphQueryService queryService;

    // -----------------------------------------------------------------------
    // Graph traversal
    // -----------------------------------------------------------------------

    /**
     * Returns the N-hop neighbor subgraph of an entity.
     */
    @GetMapping("/neighbors/{entityId}")
    public SubgraphVO getNeighborGraph(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "entityId 格式无效") String entityId,
            @RequestParam(defaultValue = "2") int depth,
            @RequestParam(defaultValue = "50") int limit) {
        return queryService.getNeighborGraph(graphId, entityId, depth, limit);
    }

    /**
     * Returns the shortest path between two entities.
     */
    @GetMapping("/shortest-path")
    public PathVO getShortestPath(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam @Pattern(regexp = UUID_REGEX, message = "sourceId 格式无效") String sourceId,
            @RequestParam @Pattern(regexp = UUID_REGEX, message = "targetId 格式无效") String targetId,
            @RequestParam(defaultValue = "3") int maxDepth) {
        return queryService.getShortestPath(graphId, sourceId, targetId, maxDepth);
    }

    /**
     * Returns all paths between two entities.
     * <p>
     * Paths are returned in ascending order of length, subject to maximum
     * depth and maximum path-count limits.
     */
    @GetMapping("/all-paths")
    public AllPathsVO findAllPaths(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam @Pattern(regexp = UUID_REGEX, message = "sourceId 格式无效") String sourceId,
            @RequestParam @Pattern(regexp = UUID_REGEX, message = "targetId 格式无效") String targetId,
            @RequestParam(defaultValue = "3") int maxDepth,
            @RequestParam(defaultValue = "10") int maxPaths) {
        return queryService.findAllPaths(graphId, sourceId, targetId, maxDepth, maxPaths);
    }

    /**
     * Extracts the subgraph (relation network) induced by a set of entities.
     */
    @PostMapping("/subgraph")
    public SubgraphVO getSubgraph(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @Valid @RequestBody SubgraphRequest request) {
        return queryService.getSubgraph(graphId, request.getEntityIds());
    }

    /**
     * Exports the subgraph induced by a set of entities.
     * <p>
     * Supports depth expansion and multiple output formats (JSON, GraphML).
     *
     * @param format output format: json (default) or graphml
     * @param depth  expansion depth (0 = only the given entities, 1 = include 1-hop neighbors)
     */
    @PostMapping("/subgraph/export")
    public ResponseEntity<?> exportSubgraph(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @Valid @RequestBody SubgraphRequest request,
            @RequestParam(defaultValue = "json") String format,
            @RequestParam(defaultValue = "0") int depth) {
        SubgraphExportVO exportVO = queryService.exportSubgraph(graphId, request.getEntityIds(), depth);

        // GraphML is served as XML text; any other format value falls through to JSON.
        if ("graphml".equalsIgnoreCase(format)) {
            String graphml = queryService.convertToGraphML(exportVO);
            return ResponseEntity.ok()
                    .contentType(MediaType.APPLICATION_XML)
                    .body(graphml);
        }

        return ResponseEntity.ok(exportVO);
    }

    // -----------------------------------------------------------------------
    // Full-text search
    // -----------------------------------------------------------------------

    /**
     * Searches entities via the full-text index.
     * <p>
     * Searches the name and description fields, ordered by relevance.
     */
    @GetMapping("/search")
    public PagedResponse<SearchHitVO> fulltextSearch(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam String q,
            @RequestParam(defaultValue = "0") int page,
            @RequestParam(defaultValue = "20") int size) {
        return queryService.fulltextSearch(graphId, q, page, size);
    }
}
|
||||
@@ -0,0 +1,66 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;

import com.datamate.common.interfaces.PagedResponse;
import com.datamate.knowledgegraph.application.GraphRelationService;
import com.datamate.knowledgegraph.interfaces.dto.CreateRelationRequest;
import com.datamate.knowledgegraph.interfaces.dto.RelationVO;
import com.datamate.knowledgegraph.interfaces.dto.UpdateRelationRequest;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Pattern;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;

/**
 * CRUD endpoints for graph relations. All business logic is delegated to
 * {@link GraphRelationService}; this controller only validates IDs and
 * maps HTTP semantics (201 on create, 204 on delete).
 */
@RestController
@RequestMapping("/knowledge-graph/{graphId}/relations")
@RequiredArgsConstructor
@Validated
public class GraphRelationController {

    // Canonical 8-4-4-4-12 hex UUID format for path-variable validation.
    private static final String UUID_REGEX =
            "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";

    private final GraphRelationService relationService;

    /** Creates a relation; returns 201 with the created relation. */
    @PostMapping
    @ResponseStatus(HttpStatus.CREATED)
    public RelationVO createRelation(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @Valid @RequestBody CreateRelationRequest request) {
        return relationService.createRelation(graphId, request);
    }

    /** Lists relations, optionally filtered by type, paged. */
    @GetMapping
    public PagedResponse<RelationVO> listRelations(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @RequestParam(required = false) String type,
            @RequestParam(defaultValue = "0") int page,
            @RequestParam(defaultValue = "20") int size) {
        return relationService.listRelations(graphId, type, page, size);
    }

    /** Fetches a single relation by ID. */
    @GetMapping("/{relationId}")
    public RelationVO getRelation(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "relationId 格式无效") String relationId) {
        return relationService.getRelation(graphId, relationId);
    }

    /** Updates a relation (patch semantics — see {@link UpdateRelationRequest}). */
    @PutMapping("/{relationId}")
    public RelationVO updateRelation(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "relationId 格式无效") String relationId,
            @Valid @RequestBody UpdateRelationRequest request) {
        return relationService.updateRelation(graphId, relationId, request);
    }

    /** Deletes a relation; returns 204 on success. */
    @DeleteMapping("/{relationId}")
    @ResponseStatus(HttpStatus.NO_CONTENT)
    public void deleteRelation(
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
            @PathVariable @Pattern(regexp = UUID_REGEX, message = "relationId 格式无效") String relationId) {
        relationService.deleteRelation(graphId, relationId);
    }

}
|
||||
@@ -0,0 +1,282 @@
|
||||
package com.datamate.knowledgegraph.interfaces.rest;
|
||||
|
||||
import com.datamate.knowledgegraph.application.GraphSyncService;
|
||||
import com.datamate.knowledgegraph.domain.model.SyncMetadata;
|
||||
import com.datamate.knowledgegraph.domain.model.SyncResult;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SyncMetadataVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SyncResultVO;
|
||||
import jakarta.validation.constraints.Max;
|
||||
import jakarta.validation.constraints.Min;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.format.annotation.DateTimeFormat;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 知识图谱数据同步 API。
|
||||
* <p>
|
||||
* 提供手动触发 MySQL → Neo4j 同步的 REST 端点。
|
||||
* 生产环境中也可通过定时任务自动触发。
|
||||
* <p>
|
||||
* <b>安全架构</b>:
|
||||
* <ul>
|
||||
* <li>外部请求 → API Gateway (JWT 校验) → X-User-* headers → 后端服务</li>
|
||||
* <li>内部调用 → X-Internal-Token header → {@code InternalTokenInterceptor} 校验 → sync 端点</li>
|
||||
* </ul>
|
||||
* Token 校验由 {@code InternalTokenInterceptor} 拦截器统一实现,
|
||||
* 对 {@code /knowledge-graph/{graphId}/sync/} 路径前缀自动生效。
|
||||
*/
|
||||
@RestController
|
||||
@RequestMapping("/knowledge-graph/{graphId}/sync")
|
||||
@RequiredArgsConstructor
|
||||
@Validated
|
||||
public class GraphSyncController {
|
||||
|
||||
private static final String UUID_REGEX =
|
||||
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";
|
||||
|
||||
private final GraphSyncService syncService;
|
||||
|
||||
/**
|
||||
* 全量同步:拉取所有实体并构建关系。
|
||||
*/
|
||||
@PostMapping("/full")
|
||||
public SyncMetadataVO syncAll(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
SyncMetadata metadata = syncService.syncAll(graphId);
|
||||
return SyncMetadataVO.from(metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* 增量同步:仅拉取指定时间窗口内变更的数据并同步。
|
||||
*/
|
||||
@PostMapping("/incremental")
|
||||
public SyncMetadataVO syncIncremental(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime updatedFrom,
|
||||
@RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime updatedTo) {
|
||||
SyncMetadata metadata = syncService.syncIncremental(graphId, updatedFrom, updatedTo);
|
||||
return SyncMetadataVO.from(metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步数据集实体。
|
||||
*/
|
||||
@PostMapping("/datasets")
|
||||
public SyncResultVO syncDatasets(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncDatasets(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步字段实体。
|
||||
*/
|
||||
@PostMapping("/fields")
|
||||
public SyncResultVO syncFields(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncFields(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步用户实体。
|
||||
*/
|
||||
@PostMapping("/users")
|
||||
public SyncResultVO syncUsers(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncUsers(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步组织实体。
|
||||
*/
|
||||
@PostMapping("/orgs")
|
||||
public SyncResultVO syncOrgs(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncOrgs(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 HAS_FIELD 关系。
|
||||
*/
|
||||
@PostMapping("/relations/has-field")
|
||||
public SyncResultVO buildHasFieldRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildHasFieldRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 DERIVED_FROM 关系。
|
||||
*/
|
||||
@PostMapping("/relations/derived-from")
|
||||
public SyncResultVO buildDerivedFromRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildDerivedFromRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 BELONGS_TO 关系。
|
||||
*/
|
||||
@PostMapping("/relations/belongs-to")
|
||||
public SyncResultVO buildBelongsToRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildBelongsToRelations(graphId));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 新增实体同步端点
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 同步工作流实体。
|
||||
*/
|
||||
@PostMapping("/workflows")
|
||||
public SyncResultVO syncWorkflows(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncWorkflows(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步作业实体。
|
||||
*/
|
||||
@PostMapping("/jobs")
|
||||
public SyncResultVO syncJobs(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncJobs(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步标注任务实体。
|
||||
*/
|
||||
@PostMapping("/label-tasks")
|
||||
public SyncResultVO syncLabelTasks(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncLabelTasks(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 同步知识集实体。
|
||||
*/
|
||||
@PostMapping("/knowledge-sets")
|
||||
public SyncResultVO syncKnowledgeSets(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.syncKnowledgeSets(graphId));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 新增关系构建端点
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 构建 USES_DATASET 关系。
|
||||
*/
|
||||
@PostMapping("/relations/uses-dataset")
|
||||
public SyncResultVO buildUsesDatasetRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildUsesDatasetRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 PRODUCES 关系。
|
||||
*/
|
||||
@PostMapping("/relations/produces")
|
||||
public SyncResultVO buildProducesRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildProducesRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 ASSIGNED_TO 关系。
|
||||
*/
|
||||
@PostMapping("/relations/assigned-to")
|
||||
public SyncResultVO buildAssignedToRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildAssignedToRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 TRIGGERS 关系。
|
||||
*/
|
||||
@PostMapping("/relations/triggers")
|
||||
public SyncResultVO buildTriggersRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildTriggersRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 DEPENDS_ON 关系。
|
||||
*/
|
||||
@PostMapping("/relations/depends-on")
|
||||
public SyncResultVO buildDependsOnRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildDependsOnRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 IMPACTS 关系。
|
||||
*/
|
||||
@PostMapping("/relations/impacts")
|
||||
public SyncResultVO buildImpactsRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildImpactsRelations(graphId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 SOURCED_FROM 关系。
|
||||
*/
|
||||
@PostMapping("/relations/sourced-from")
|
||||
public SyncResultVO buildSourcedFromRelations(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId) {
|
||||
return SyncResultVO.from(syncService.buildSourcedFromRelations(graphId));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 同步历史查询端点
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* 查询同步历史记录。
|
||||
*
|
||||
* @param status 可选,按状态过滤(SUCCESS / FAILED / PARTIAL)
|
||||
* @param limit 返回条数上限,默认 20
|
||||
*/
|
||||
@GetMapping("/history")
|
||||
public List<SyncMetadataVO> getSyncHistory(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam(required = false) String status,
|
||||
@RequestParam(defaultValue = "20") @Min(1) @Max(200) int limit) {
|
||||
List<SyncMetadata> history = syncService.getSyncHistory(graphId, status, limit);
|
||||
return history.stream().map(SyncMetadataVO::from).toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* 按时间范围查询同步历史。
|
||||
*/
|
||||
@GetMapping("/history/range")
|
||||
public List<SyncMetadataVO> getSyncHistoryByTimeRange(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime from,
|
||||
@RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime to,
|
||||
@RequestParam(defaultValue = "0") @Min(0) @Max(10000) int page,
|
||||
@RequestParam(defaultValue = "20") @Min(1) @Max(200) int size) {
|
||||
List<SyncMetadata> history = syncService.getSyncHistoryByTimeRange(graphId, from, to, page, size);
|
||||
return history.stream().map(SyncMetadataVO::from).toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* 根据 syncId 查询单条同步记录。
|
||||
*/
|
||||
@GetMapping("/history/{syncId}")
|
||||
public ResponseEntity<SyncMetadataVO> getSyncRecord(
|
||||
@PathVariable @Pattern(regexp = UUID_REGEX, message = "graphId 格式无效") String graphId,
|
||||
@PathVariable String syncId) {
|
||||
return syncService.getSyncRecord(graphId, syncId)
|
||||
.map(SyncMetadataVO::from)
|
||||
.map(ResponseEntity::ok)
|
||||
.orElse(ResponseEntity.notFound().build());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,76 @@
|
||||
# 知识图谱服务 - Neo4j连接配置
|
||||
# 该配置在 main-application 的 spring.config.import 中引入
|
||||
# 注意:生产环境务必通过环境变量 NEO4J_PASSWORD 设置密码,不要使用默认值
|
||||
|
||||
spring:
|
||||
data:
|
||||
redis:
|
||||
host: ${REDIS_HOST:datamate-redis}
|
||||
port: ${REDIS_PORT:6379}
|
||||
password: ${REDIS_PASSWORD:}
|
||||
timeout: ${REDIS_TIMEOUT:3000}
|
||||
|
||||
neo4j:
|
||||
uri: ${NEO4J_URI:bolt://datamate-neo4j:7687}
|
||||
authentication:
|
||||
username: ${NEO4J_USERNAME:neo4j}
|
||||
password: ${NEO4J_PASSWORD:datamate123}
|
||||
pool:
|
||||
max-connection-pool-size: ${NEO4J_POOL_MAX_SIZE:50}
|
||||
connection-acquisition-timeout: 30s
|
||||
max-connection-lifetime: 1h
|
||||
log-leaked-sessions: true
|
||||
|
||||
# 知识图谱服务配置
|
||||
datamate:
|
||||
knowledge-graph:
|
||||
# 默认查询跳数限制
|
||||
max-depth: ${KG_MAX_DEPTH:3}
|
||||
# 子图返回最大节点数
|
||||
max-nodes-per-query: ${KG_MAX_NODES:500}
|
||||
# 批量导入批次大小
|
||||
import-batch-size: ${KG_IMPORT_BATCH_SIZE:100}
|
||||
# 安全配置
|
||||
security:
|
||||
# 内部服务调用 Token(用于 sync 端点的 X-Internal-Token 校验)
|
||||
# 生产环境务必通过 KG_INTERNAL_TOKEN 环境变量设置,否则 sync 端点将拒绝所有请求(fail-closed)
|
||||
internal-token: ${KG_INTERNAL_TOKEN:}
|
||||
# 是否跳过 Token 校验(默认 false = fail-closed)
|
||||
# 仅在 dev/test 环境显式设置为 true 以跳过校验
|
||||
skip-token-check: ${KG_SKIP_TOKEN_CHECK:false}
|
||||
# Schema 迁移配置
|
||||
migration:
|
||||
# 是否启用 Schema 版本化迁移
|
||||
enabled: ${KG_MIGRATION_ENABLED:true}
|
||||
# 是否校验已应用迁移的 checksum(防止迁移被篡改)
|
||||
validate-checksums: ${KG_MIGRATION_VALIDATE_CHECKSUMS:true}
|
||||
# MySQL → Neo4j 同步配置
|
||||
sync:
|
||||
# 数据管理服务地址
|
||||
data-management-url: ${DATA_MANAGEMENT_URL:http://localhost:8080/api}
|
||||
# 标注服务地址
|
||||
annotation-service-url: ${ANNOTATION_SERVICE_URL:http://localhost:8080/api}
|
||||
# 每页拉取数量
|
||||
page-size: ${KG_SYNC_PAGE_SIZE:200}
|
||||
# HTTP 连接超时(毫秒)
|
||||
connect-timeout: ${KG_SYNC_CONNECT_TIMEOUT:5000}
|
||||
# HTTP 读取超时(毫秒)
|
||||
read-timeout: ${KG_SYNC_READ_TIMEOUT:30000}
|
||||
# 失败时最大重试次数
|
||||
max-retries: ${KG_SYNC_MAX_RETRIES:3}
|
||||
# 重试间隔(毫秒)
|
||||
retry-interval: ${KG_SYNC_RETRY_INTERVAL:1000}
|
||||
# 是否在启动时自动初始化 Schema
|
||||
auto-init-schema: ${KG_AUTO_INIT_SCHEMA:true}
|
||||
# 是否允许空快照触发 purge(默认 false,防止上游返回空列表时误删全部同步实体)
|
||||
allow-purge-on-empty-snapshot: ${KG_ALLOW_PURGE_ON_EMPTY_SNAPSHOT:false}
|
||||
# 缓存配置
|
||||
cache:
|
||||
# 是否启用 Redis 缓存
|
||||
enabled: ${KG_CACHE_ENABLED:true}
|
||||
# 实体缓存 TTL(秒)
|
||||
entity-ttl-seconds: ${KG_CACHE_ENTITY_TTL:3600}
|
||||
# 查询结果缓存 TTL(秒)
|
||||
query-ttl-seconds: ${KG_CACHE_QUERY_TTL:300}
|
||||
# 全文搜索缓存 TTL(秒)
|
||||
search-ttl-seconds: ${KG_CACHE_SEARCH_TTL:180}
|
||||
@@ -0,0 +1,361 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.domain.model.EditReview;
|
||||
import com.datamate.knowledgegraph.domain.repository.EditReviewRepository;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.EditReviewVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SubmitReviewRequest;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class EditReviewServiceTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String REVIEW_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
private static final String ENTITY_ID = "770e8400-e29b-41d4-a716-446655440002";
|
||||
private static final String USER_ID = "user-1";
|
||||
private static final String REVIEWER_ID = "reviewer-1";
|
||||
private static final String INVALID_GRAPH_ID = "not-a-uuid";
|
||||
|
||||
@Mock
|
||||
private EditReviewRepository reviewRepository;
|
||||
|
||||
@Mock
|
||||
private GraphEntityService entityService;
|
||||
|
||||
@Mock
|
||||
private GraphRelationService relationService;
|
||||
|
||||
@InjectMocks
|
||||
private EditReviewService reviewService;
|
||||
|
||||
private EditReview pendingReview;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
pendingReview = EditReview.builder()
|
||||
.id(REVIEW_ID)
|
||||
.graphId(GRAPH_ID)
|
||||
.operationType("CREATE_ENTITY")
|
||||
.payload("{\"name\":\"TestEntity\",\"type\":\"Dataset\"}")
|
||||
.status("PENDING")
|
||||
.submittedBy(USER_ID)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.build();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// graphId 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void submitReview_invalidGraphId_throwsBusinessException() {
|
||||
SubmitReviewRequest request = new SubmitReviewRequest();
|
||||
request.setOperationType("CREATE_ENTITY");
|
||||
request.setPayload("{}");
|
||||
|
||||
assertThatThrownBy(() -> reviewService.submitReview(INVALID_GRAPH_ID, request, USER_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> reviewService.approveReview(INVALID_GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// submitReview
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void submitReview_success() {
|
||||
SubmitReviewRequest request = new SubmitReviewRequest();
|
||||
request.setOperationType("CREATE_ENTITY");
|
||||
request.setPayload("{\"name\":\"NewEntity\",\"type\":\"Dataset\"}");
|
||||
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
EditReviewVO result = reviewService.submitReview(GRAPH_ID, request, USER_ID);
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getStatus()).isEqualTo("PENDING");
|
||||
assertThat(result.getOperationType()).isEqualTo("CREATE_ENTITY");
|
||||
verify(reviewRepository).save(any(EditReview.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void submitReview_withEntityId() {
|
||||
SubmitReviewRequest request = new SubmitReviewRequest();
|
||||
request.setOperationType("UPDATE_ENTITY");
|
||||
request.setEntityId(ENTITY_ID);
|
||||
request.setPayload("{\"name\":\"Updated\"}");
|
||||
|
||||
EditReview savedReview = EditReview.builder()
|
||||
.id(REVIEW_ID)
|
||||
.graphId(GRAPH_ID)
|
||||
.operationType("UPDATE_ENTITY")
|
||||
.entityId(ENTITY_ID)
|
||||
.payload("{\"name\":\"Updated\"}")
|
||||
.status("PENDING")
|
||||
.submittedBy(USER_ID)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.build();
|
||||
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(savedReview);
|
||||
|
||||
EditReviewVO result = reviewService.submitReview(GRAPH_ID, request, USER_ID);
|
||||
|
||||
assertThat(result.getEntityId()).isEqualTo(ENTITY_ID);
|
||||
assertThat(result.getOperationType()).isEqualTo("UPDATE_ENTITY");
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// approveReview
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void approveReview_success_appliesChange() {
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
EditReviewVO result = reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, "LGTM");
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(pendingReview.getStatus()).isEqualTo("APPROVED");
|
||||
assertThat(pendingReview.getReviewedBy()).isEqualTo(REVIEWER_ID);
|
||||
assertThat(pendingReview.getReviewComment()).isEqualTo("LGTM");
|
||||
assertThat(pendingReview.getReviewedAt()).isNotNull();
|
||||
|
||||
// Verify applyChange was called (createEntity for CREATE_ENTITY)
|
||||
verify(entityService).createEntity(eq(GRAPH_ID), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_notFound_throwsBusinessException() {
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_alreadyProcessed_throwsBusinessException() {
|
||||
pendingReview.setStatus("APPROVED");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
|
||||
assertThatThrownBy(() -> reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_deleteEntity_appliesChange() {
|
||||
pendingReview.setOperationType("DELETE_ENTITY");
|
||||
pendingReview.setEntityId(ENTITY_ID);
|
||||
pendingReview.setPayload(null);
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(entityService).deleteEntity(GRAPH_ID, ENTITY_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_updateEntity_appliesChange() {
|
||||
pendingReview.setOperationType("UPDATE_ENTITY");
|
||||
pendingReview.setEntityId(ENTITY_ID);
|
||||
pendingReview.setPayload("{\"name\":\"Updated\"}");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(entityService).updateEntity(eq(GRAPH_ID), eq(ENTITY_ID), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_createRelation_appliesChange() {
|
||||
pendingReview.setOperationType("CREATE_RELATION");
|
||||
pendingReview.setPayload("{\"sourceEntityId\":\"a\",\"targetEntityId\":\"b\",\"relationType\":\"HAS_FIELD\"}");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(relationService).createRelation(eq(GRAPH_ID), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_invalidPayload_throwsBusinessException() {
|
||||
pendingReview.setOperationType("CREATE_ENTITY");
|
||||
pendingReview.setPayload("not valid json {{");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
|
||||
assertThatThrownBy(() -> reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_batchDeleteEntity_appliesChange() {
|
||||
pendingReview.setOperationType("BATCH_DELETE_ENTITY");
|
||||
pendingReview.setPayload("{\"ids\":[\"id-1\",\"id-2\",\"id-3\"]}");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(entityService).batchDeleteEntities(eq(GRAPH_ID), eq(List.of("id-1", "id-2", "id-3")));
|
||||
}
|
||||
|
||||
@Test
|
||||
void approveReview_batchDeleteRelation_appliesChange() {
|
||||
pendingReview.setOperationType("BATCH_DELETE_RELATION");
|
||||
pendingReview.setPayload("{\"ids\":[\"rel-1\",\"rel-2\"]}");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
reviewService.approveReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null);
|
||||
|
||||
verify(relationService).batchDeleteRelations(eq(GRAPH_ID), eq(List.of("rel-1", "rel-2")));
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// rejectReview
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void rejectReview_success() {
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
when(reviewRepository.save(any(EditReview.class))).thenReturn(pendingReview);
|
||||
|
||||
EditReviewVO result = reviewService.rejectReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, "不合适");
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(pendingReview.getStatus()).isEqualTo("REJECTED");
|
||||
assertThat(pendingReview.getReviewedBy()).isEqualTo(REVIEWER_ID);
|
||||
assertThat(pendingReview.getReviewComment()).isEqualTo("不合适");
|
||||
assertThat(pendingReview.getReviewedAt()).isNotNull();
|
||||
|
||||
// Verify no change was applied
|
||||
verifyNoInteractions(entityService);
|
||||
verifyNoInteractions(relationService);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rejectReview_notFound_throwsBusinessException() {
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> reviewService.rejectReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rejectReview_alreadyProcessed_throwsBusinessException() {
|
||||
pendingReview.setStatus("REJECTED");
|
||||
|
||||
when(reviewRepository.findById(REVIEW_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(pendingReview));
|
||||
|
||||
assertThatThrownBy(() -> reviewService.rejectReview(GRAPH_ID, REVIEW_ID, REVIEWER_ID, null))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listPendingReviews
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listPendingReviews_returnsPagedResult() {
|
||||
when(reviewRepository.findPendingByGraphId(GRAPH_ID, 0L, 20))
|
||||
.thenReturn(List.of(pendingReview));
|
||||
when(reviewRepository.countPendingByGraphId(GRAPH_ID)).thenReturn(1L);
|
||||
|
||||
var result = reviewService.listPendingReviews(GRAPH_ID, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
assertThat(result.getTotalElements()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listPendingReviews_clampsPageSize() {
|
||||
when(reviewRepository.findPendingByGraphId(GRAPH_ID, 0L, 200))
|
||||
.thenReturn(List.of());
|
||||
when(reviewRepository.countPendingByGraphId(GRAPH_ID)).thenReturn(0L);
|
||||
|
||||
reviewService.listPendingReviews(GRAPH_ID, 0, 999);
|
||||
|
||||
verify(reviewRepository).findPendingByGraphId(GRAPH_ID, 0L, 200);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listPendingReviews_negativePage_clampedToZero() {
|
||||
when(reviewRepository.findPendingByGraphId(GRAPH_ID, 0L, 20))
|
||||
.thenReturn(List.of());
|
||||
when(reviewRepository.countPendingByGraphId(GRAPH_ID)).thenReturn(0L);
|
||||
|
||||
var result = reviewService.listPendingReviews(GRAPH_ID, -1, 20);
|
||||
|
||||
assertThat(result.getPage()).isEqualTo(0);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listReviews
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listReviews_withStatusFilter() {
|
||||
when(reviewRepository.findByGraphId(GRAPH_ID, "APPROVED", 0L, 20))
|
||||
.thenReturn(List.of());
|
||||
when(reviewRepository.countByGraphId(GRAPH_ID, "APPROVED")).thenReturn(0L);
|
||||
|
||||
var result = reviewService.listReviews(GRAPH_ID, "APPROVED", 0, 20);
|
||||
|
||||
assertThat(result.getContent()).isEmpty();
|
||||
verify(reviewRepository).findByGraphId(GRAPH_ID, "APPROVED", 0L, 20);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listReviews_withoutStatusFilter() {
|
||||
when(reviewRepository.findByGraphId(GRAPH_ID, null, 0L, 20))
|
||||
.thenReturn(List.of(pendingReview));
|
||||
when(reviewRepository.countByGraphId(GRAPH_ID, null)).thenReturn(1L);
|
||||
|
||||
var result = reviewService.listReviews(GRAPH_ID, null, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,243 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.CreateEntityRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.UpdateEntityRequest;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class GraphEntityServiceTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String ENTITY_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
private static final String INVALID_GRAPH_ID = "not-a-uuid";
|
||||
|
||||
@Mock
|
||||
private GraphEntityRepository entityRepository;
|
||||
|
||||
@Mock
|
||||
private KnowledgeGraphProperties properties;
|
||||
|
||||
@Mock
|
||||
private GraphCacheService cacheService;
|
||||
|
||||
@InjectMocks
|
||||
private GraphEntityService entityService;
|
||||
|
||||
private GraphEntity sampleEntity;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
sampleEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID)
|
||||
.name("TestDataset")
|
||||
.type("Dataset")
|
||||
.description("A test dataset")
|
||||
.graphId(GRAPH_ID)
|
||||
.confidence(1.0)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.updatedAt(LocalDateTime.now())
|
||||
.build();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// graphId 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getEntity_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> entityService.getEntity(INVALID_GRAPH_ID, ENTITY_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getEntity_nullGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> entityService.getEntity(null, ENTITY_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// createEntity
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void createEntity_success() {
|
||||
CreateEntityRequest request = new CreateEntityRequest();
|
||||
request.setName("NewEntity");
|
||||
request.setType("Dataset");
|
||||
request.setDescription("Desc");
|
||||
|
||||
when(entityRepository.save(any(GraphEntity.class))).thenReturn(sampleEntity);
|
||||
|
||||
GraphEntity result = entityService.createEntity(GRAPH_ID, request);
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getName()).isEqualTo("TestDataset");
|
||||
verify(entityRepository).save(any(GraphEntity.class));
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, ENTITY_ID);
|
||||
verify(cacheService).evictSearchCaches(GRAPH_ID);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getEntity
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getEntity_found() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
|
||||
GraphEntity result = entityService.getEntity(GRAPH_ID, ENTITY_ID);
|
||||
|
||||
assertThat(result.getId()).isEqualTo(ENTITY_ID);
|
||||
assertThat(result.getName()).isEqualTo("TestDataset");
|
||||
}
|
||||
|
||||
@Test
|
||||
void getEntity_notFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> entityService.getEntity(GRAPH_ID, ENTITY_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listEntities
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listEntities_returnsAll() {
|
||||
when(entityRepository.findByGraphId(GRAPH_ID))
|
||||
.thenReturn(List.of(sampleEntity));
|
||||
|
||||
List<GraphEntity> results = entityService.listEntities(GRAPH_ID);
|
||||
|
||||
assertThat(results).hasSize(1);
|
||||
assertThat(results.get(0).getName()).isEqualTo("TestDataset");
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// updateEntity
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void updateEntity_partialUpdate_onlyChangesProvidedFields() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
when(entityRepository.save(any(GraphEntity.class)))
|
||||
.thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
UpdateEntityRequest request = new UpdateEntityRequest();
|
||||
request.setName("UpdatedName");
|
||||
// description not set — should remain unchanged
|
||||
|
||||
GraphEntity result = entityService.updateEntity(GRAPH_ID, ENTITY_ID, request);
|
||||
|
||||
assertThat(result.getName()).isEqualTo("UpdatedName");
|
||||
assertThat(result.getDescription()).isEqualTo("A test dataset");
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, ENTITY_ID);
|
||||
verify(cacheService).evictSearchCaches(GRAPH_ID);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// deleteEntity
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void deleteEntity_success() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleEntity));
|
||||
|
||||
entityService.deleteEntity(GRAPH_ID, ENTITY_ID);
|
||||
|
||||
verify(entityRepository).delete(sampleEntity);
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, ENTITY_ID);
|
||||
verify(cacheService).evictSearchCaches(GRAPH_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void deleteEntity_notFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> entityService.deleteEntity(GRAPH_ID, ENTITY_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getNeighbors — 深度/限制 clamping
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getNeighbors_clampsDepthAndLimit() {
|
||||
when(properties.getMaxDepth()).thenReturn(3);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
when(entityRepository.findNeighbors(eq(GRAPH_ID), eq(ENTITY_ID), eq(3), eq(500)))
|
||||
.thenReturn(List.of());
|
||||
|
||||
List<GraphEntity> result = entityService.getNeighbors(GRAPH_ID, ENTITY_ID, 100, 99999);
|
||||
|
||||
assertThat(result).isEmpty();
|
||||
// depth clamped to maxDepth=3, limit clamped to maxNodesPerQuery=500
|
||||
verify(entityRepository).findNeighbors(GRAPH_ID, ENTITY_ID, 3, 500);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 分页
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listEntitiesPaged_normalPage() {
|
||||
when(entityRepository.findByGraphIdPaged(GRAPH_ID, 0L, 20))
|
||||
.thenReturn(List.of(sampleEntity));
|
||||
when(entityRepository.countByGraphId(GRAPH_ID)).thenReturn(1L);
|
||||
|
||||
var result = entityService.listEntitiesPaged(GRAPH_ID, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
assertThat(result.getTotalElements()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listEntitiesPaged_negativePage_clampedToZero() {
|
||||
when(entityRepository.findByGraphIdPaged(GRAPH_ID, 0L, 20))
|
||||
.thenReturn(List.of());
|
||||
when(entityRepository.countByGraphId(GRAPH_ID)).thenReturn(0L);
|
||||
|
||||
var result = entityService.listEntitiesPaged(GRAPH_ID, -1, 20);
|
||||
|
||||
assertThat(result.getPage()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listEntitiesPaged_oversizedPage_clampedTo200() {
|
||||
when(entityRepository.findByGraphIdPaged(GRAPH_ID, 0L, 200))
|
||||
.thenReturn(List.of());
|
||||
when(entityRepository.countByGraphId(GRAPH_ID)).thenReturn(0L);
|
||||
|
||||
entityService.listEntitiesPaged(GRAPH_ID, 0, 999);
|
||||
|
||||
verify(entityRepository).findByGraphIdPaged(GRAPH_ID, 0L, 200);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,894 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.auth.application.ResourceAccessService;
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.neo4j.KnowledgeGraphProperties;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.AllPathsVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SubgraphExportVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.SubgraphVO;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.neo4j.driver.Driver;
|
||||
import org.springframework.data.neo4j.core.Neo4jClient;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class GraphQueryServiceTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String ENTITY_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
private static final String ENTITY_ID_2 = "660e8400-e29b-41d4-a716-446655440002";
|
||||
private static final String INVALID_GRAPH_ID = "bad-id";
|
||||
|
||||
@Mock
|
||||
private Neo4jClient neo4jClient;
|
||||
|
||||
@Mock
|
||||
private Driver neo4jDriver;
|
||||
|
||||
@Mock
|
||||
private GraphEntityRepository entityRepository;
|
||||
|
||||
@Mock
|
||||
private KnowledgeGraphProperties properties;
|
||||
|
||||
@Mock
|
||||
private ResourceAccessService resourceAccessService;
|
||||
|
||||
@InjectMocks
|
||||
private GraphQueryService queryService;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// graphId 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(INVALID_GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.getShortestPath(INVALID_GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.getSubgraph(INVALID_GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void fulltextSearch_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.fulltextSearch(INVALID_GRAPH_ID, "test", 0, 20))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getNeighborGraph — 实体不存在
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_entityNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getShortestPath — 起止相同
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getShortestPath_sameSourceAndTarget_returnsSingleNode() {
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Node").type("Dataset").graphId(GRAPH_ID).build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
var result = queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3);
|
||||
|
||||
assertThat(result.getPathLength()).isEqualTo(0);
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_sourceNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getSubgraph — 空输入
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getSubgraph_nullEntityIds_returnsEmptySubgraph() {
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, null);
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_emptyEntityIds_returnsEmptySubgraph() {
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of());
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_exceedsMaxNodes_throwsBusinessException() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(5);
|
||||
|
||||
List<String> tooManyIds = List.of("1", "2", "3", "4", "5", "6");
|
||||
|
||||
assertThatThrownBy(() -> queryService.getSubgraph(GRAPH_ID, tooManyIds))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_noExistingEntities_returnsEmptySubgraph() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of());
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// fulltextSearch — 空查询
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void fulltextSearch_blankQuery_returnsEmpty() {
|
||||
var result = queryService.fulltextSearch(GRAPH_ID, "", 0, 20);
|
||||
|
||||
assertThat(result.getContent()).isEmpty();
|
||||
assertThat(result.getTotalElements()).isEqualTo(0);
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void fulltextSearch_nullQuery_returnsEmpty() {
|
||||
var result = queryService.fulltextSearch(GRAPH_ID, null, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).isEmpty();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// 权限过滤
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class PermissionFilteringTest {
|
||||
|
||||
private static final String CURRENT_USER_ID = "user-123";
|
||||
private static final String OTHER_USER_ID = "other-user";
|
||||
|
||||
// -- getNeighborGraph 权限 --
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_otherEntity_throwsInsufficientPermissions() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_admin_otherEntity_noPermissionDenied() {
|
||||
// 管理员返回 null → 不过滤
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(null);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
when(properties.getMaxDepth()).thenReturn(3);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
// 管理员不会被权限拦截,会继续到 Neo4jClient 调用
|
||||
// 由于 Neo4jClient 未完全 mock,会抛出其他异常,不是 BusinessException
|
||||
try {
|
||||
queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50);
|
||||
} catch (BusinessException e) {
|
||||
throw new AssertionError("Admin should not be blocked by permission check", e);
|
||||
} catch (Exception ignored) {
|
||||
// Neo4jClient mock chain 未完成,预期其他异常
|
||||
}
|
||||
}
|
||||
|
||||
// -- getShortestPath 权限 --
|
||||
|
||||
@Test
|
||||
void getShortestPath_nonAdmin_sourceNotAccessible_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_nonAdmin_targetNotAccessible_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
GraphEntity targetEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID_2, GRAPH_ID))
|
||||
.thenReturn(Optional.of(targetEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_nonAdmin_sameOwnEntity_returnsSingleNode() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
var result = queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3);
|
||||
|
||||
assertThat(result.getPathLength()).isEqualTo(0);
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("My Dataset");
|
||||
}
|
||||
|
||||
@Test
|
||||
void getShortestPath_nonAdmin_structuralEntity_noPermissionDenied() {
|
||||
// 结构型实体(无 created_by)对所有用户可见
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity structuralEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Admin User").type("User").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(structuralEntity));
|
||||
|
||||
// 起止相同 → 返回单节点路径,不需要 Neo4jClient
|
||||
var result = queryService.getShortestPath(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3);
|
||||
|
||||
assertThat(result.getPathLength()).isEqualTo(0);
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getType()).isEqualTo("User");
|
||||
}
|
||||
|
||||
// -- getSubgraph 权限过滤 --
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_filtersInaccessibleEntities() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity ownEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
GraphEntity otherEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Other Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2)))
|
||||
.thenReturn(List.of(ownEntity, otherEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2));
|
||||
|
||||
// 只返回自己创建的实体(另一个被过滤),单节点无边
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("My Dataset");
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_allFiltered_returnsEmptySubgraph() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity otherEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", OTHER_USER_ID)))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(otherEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_structuralEntitiesVisible() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
// 结构型实体没有 created_by → 对所有用户可见
|
||||
GraphEntity structuralEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Default Org").type("Org").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(structuralEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getType()).isEqualTo("Org");
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_admin_seesAllEntities() {
|
||||
// 管理员返回 null → 不过滤
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(null);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity otherUserEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "user-1")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(otherUserEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
// 管理员看到其他用户的实体(不被过滤)
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Other's Dataset");
|
||||
}
|
||||
|
||||
// -- P1-2: 业务实体缺失 created_by(脏数据)被正确拦截 --
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_businessEntityWithoutCreatedBy_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
// 业务实体缺失 created_by → 应被拦截
|
||||
GraphEntity dirtyEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Dirty Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(dirtyEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_businessEntityWithoutCreatedBy_filtered() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
// 业务实体缺失 created_by → 应被过滤
|
||||
GraphEntity dirtyEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Dirty Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(dirtyEntity));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
// -- P1-1: CONFIDENTIAL 敏感度过滤 --
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_confidentialEntity_throwsWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
// canViewConfidential() 默认返回 false(mock 默认值)→ 无保密权限
|
||||
|
||||
GraphEntity confidentialEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "CONFIDENTIAL")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(confidentialEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_confidentialEntity_allowedWithPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(resourceAccessService.canViewConfidential()).thenReturn(true);
|
||||
|
||||
GraphEntity confidentialEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "CONFIDENTIAL")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(confidentialEntity));
|
||||
when(properties.getMaxDepth()).thenReturn(3);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
// 有保密权限 → 通过安全检查,继续到 Neo4jClient 调用
|
||||
try {
|
||||
queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50);
|
||||
} catch (BusinessException e) {
|
||||
throw new AssertionError("Should not be blocked by permission check", e);
|
||||
} catch (Exception ignored) {
|
||||
// Neo4jClient mock chain 未完成,预期其他异常
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_confidentialEntity_filteredWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
// canViewConfidential() 默认返回 false → 无保密权限
|
||||
|
||||
GraphEntity ownNonConfidential = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Normal KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
GraphEntity ownConfidential = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "CONFIDENTIAL")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2)))
|
||||
.thenReturn(List.of(ownNonConfidential, ownConfidential));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2));
|
||||
|
||||
// CONFIDENTIAL 实体被过滤,只剩普通实体
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Normal KS");
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_confidentialEntity_visibleWithPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(resourceAccessService.canViewConfidential()).thenReturn(true);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity ownConfidential = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "CONFIDENTIAL")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(ownConfidential));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID));
|
||||
|
||||
// 有保密权限 → 看到 CONFIDENTIAL 实体
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Secret KS");
|
||||
}
|
||||
|
||||
// -- P2-2: CONFIDENTIAL 大小写不敏感 --
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_lowercaseConfidential_throwsWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "confidential")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getNeighborGraph_nonAdmin_mixedCaseConfidentialWithSpaces_throwsWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", " Confidential ")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.getNeighborGraph(GRAPH_ID, ENTITY_ID, 2, 50))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getSubgraph_nonAdmin_lowercaseConfidential_filteredWithoutPermission() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn(CURRENT_USER_ID);
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity normalKs = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Normal KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID)))
|
||||
.build();
|
||||
GraphEntity lowercaseConfidential = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Secret KS").type("KnowledgeSet").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", CURRENT_USER_ID, "sensitivity", "confidential")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2)))
|
||||
.thenReturn(List.of(normalKs, lowercaseConfidential));
|
||||
|
||||
SubgraphVO result = queryService.getSubgraph(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2));
|
||||
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Normal KS");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// findAllPaths
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class FindAllPathsTest {
|
||||
|
||||
@Test
|
||||
void findAllPaths_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(INVALID_GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_sourceNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_targetNotFound_throwsBusinessException() {
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Source").type("Dataset").graphId(GRAPH_ID).build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID_2, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_sameSourceAndTarget_returnsSingleNodePath() {
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Node").type("Dataset").graphId(GRAPH_ID).build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(entity));
|
||||
|
||||
AllPathsVO result = queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3, 10);
|
||||
|
||||
assertThat(result.getPathCount()).isEqualTo(1);
|
||||
assertThat(result.getPaths()).hasSize(1);
|
||||
assertThat(result.getPaths().get(0).getPathLength()).isEqualTo(0);
|
||||
assertThat(result.getPaths().get(0).getNodes()).hasSize(1);
|
||||
assertThat(result.getPaths().get(0).getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_nonAdmin_sourceNotAccessible_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn("user-123");
|
||||
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "other-user")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_nonAdmin_targetNotAccessible_throws() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn("user-123");
|
||||
|
||||
GraphEntity sourceEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "user-123")))
|
||||
.build();
|
||||
GraphEntity targetEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Other's Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "other-user")))
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID_2, GRAPH_ID))
|
||||
.thenReturn(Optional.of(targetEntity));
|
||||
|
||||
assertThatThrownBy(() -> queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID_2, 3, 10))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
|
||||
verifyNoInteractions(neo4jClient);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllPaths_nonAdmin_structuralEntity_sameSourceAndTarget_returnsSingleNode() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn("user-123");
|
||||
|
||||
GraphEntity structuralEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Admin User").type("User").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>())
|
||||
.build();
|
||||
when(entityRepository.findByIdAndGraphId(ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(structuralEntity));
|
||||
|
||||
AllPathsVO result = queryService.findAllPaths(GRAPH_ID, ENTITY_ID, ENTITY_ID, 3, 10);
|
||||
|
||||
assertThat(result.getPathCount()).isEqualTo(1);
|
||||
assertThat(result.getPaths().get(0).getNodes().get(0).getType()).isEqualTo("User");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// exportSubgraph
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class ExportSubgraphTest {
|
||||
|
||||
@Test
|
||||
void exportSubgraph_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> queryService.exportSubgraph(INVALID_GRAPH_ID, List.of(ENTITY_ID), 0))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_nullEntityIds_returnsEmptyExport() {
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID, null, 0);
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_emptyEntityIds_returnsEmptyExport() {
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID, List.of(), 0);
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_exceedsMaxNodes_throwsBusinessException() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(5);
|
||||
|
||||
List<String> tooManyIds = List.of("1", "2", "3", "4", "5", "6");
|
||||
|
||||
assertThatThrownBy(() -> queryService.exportSubgraph(GRAPH_ID, tooManyIds, 0))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_depthZero_noExistingEntities_returnsEmptyExport() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of());
|
||||
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID, List.of(ENTITY_ID), 0);
|
||||
|
||||
assertThat(result.getNodes()).isEmpty();
|
||||
assertThat(result.getNodeCount()).isEqualTo(0);
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_depthZero_singleEntity_returnsNodeWithProperties() {
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity entity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("Test Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.description("A test dataset")
|
||||
.properties(new HashMap<>(Map.of("created_by", "user-1", "sensitivity", "PUBLIC")))
|
||||
.build();
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID)))
|
||||
.thenReturn(List.of(entity));
|
||||
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID, List.of(ENTITY_ID), 0);
|
||||
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodeCount()).isEqualTo(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("Test Dataset");
|
||||
assertThat(result.getNodes().get(0).getProperties()).containsEntry("created_by", "user-1");
|
||||
// 单节点无边
|
||||
assertThat(result.getEdges()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void exportSubgraph_nonAdmin_filtersInaccessibleEntities() {
|
||||
when(resourceAccessService.resolveOwnerFilterUserId()).thenReturn("user-123");
|
||||
when(properties.getMaxNodesPerQuery()).thenReturn(500);
|
||||
|
||||
GraphEntity ownEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID).name("My Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "user-123")))
|
||||
.build();
|
||||
GraphEntity otherEntity = GraphEntity.builder()
|
||||
.id(ENTITY_ID_2).name("Other Dataset").type("Dataset").graphId(GRAPH_ID)
|
||||
.properties(new HashMap<>(Map.of("created_by", "other-user")))
|
||||
.build();
|
||||
|
||||
when(entityRepository.findByGraphIdAndIdIn(GRAPH_ID, List.of(ENTITY_ID, ENTITY_ID_2)))
|
||||
.thenReturn(List.of(ownEntity, otherEntity));
|
||||
|
||||
SubgraphExportVO result = queryService.exportSubgraph(GRAPH_ID,
|
||||
List.of(ENTITY_ID, ENTITY_ID_2), 0);
|
||||
|
||||
assertThat(result.getNodes()).hasSize(1);
|
||||
assertThat(result.getNodes().get(0).getName()).isEqualTo("My Dataset");
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// convertToGraphML
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Nested
|
||||
class ConvertToGraphMLTest {
|
||||
|
||||
@Test
|
||||
void convertToGraphML_emptyExport_producesValidXml() {
|
||||
SubgraphExportVO emptyExport = SubgraphExportVO.builder()
|
||||
.nodes(List.of())
|
||||
.edges(List.of())
|
||||
.nodeCount(0)
|
||||
.edgeCount(0)
|
||||
.build();
|
||||
|
||||
String graphml = queryService.convertToGraphML(emptyExport);
|
||||
|
||||
assertThat(graphml).contains("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
|
||||
assertThat(graphml).contains("<graphml");
|
||||
assertThat(graphml).contains("<graph id=\"G\" edgedefault=\"directed\">");
|
||||
assertThat(graphml).contains("</graphml>");
|
||||
}
|
||||
|
||||
@Test
|
||||
void convertToGraphML_withNodesAndEdges_producesCorrectStructure() {
|
||||
SubgraphExportVO export = SubgraphExportVO.builder()
|
||||
.nodes(List.of(
|
||||
com.datamate.knowledgegraph.interfaces.dto.ExportNodeVO.builder()
|
||||
.id("node-1").name("Dataset A").type("Dataset")
|
||||
.description("Test dataset").properties(Map.of())
|
||||
.build(),
|
||||
com.datamate.knowledgegraph.interfaces.dto.ExportNodeVO.builder()
|
||||
.id("node-2").name("Workflow B").type("Workflow")
|
||||
.description(null).properties(Map.of())
|
||||
.build()
|
||||
))
|
||||
.edges(List.of(
|
||||
com.datamate.knowledgegraph.interfaces.dto.ExportEdgeVO.builder()
|
||||
.id("edge-1").sourceEntityId("node-1").targetEntityId("node-2")
|
||||
.relationType("DERIVED_FROM").weight(0.8)
|
||||
.build()
|
||||
))
|
||||
.nodeCount(2)
|
||||
.edgeCount(1)
|
||||
.build();
|
||||
|
||||
String graphml = queryService.convertToGraphML(export);
|
||||
|
||||
assertThat(graphml).contains("<node id=\"node-1\">");
|
||||
assertThat(graphml).contains("<data key=\"name\">Dataset A</data>");
|
||||
assertThat(graphml).contains("<data key=\"type\">Dataset</data>");
|
||||
assertThat(graphml).contains("<data key=\"description\">Test dataset</data>");
|
||||
assertThat(graphml).contains("<node id=\"node-2\">");
|
||||
assertThat(graphml).contains("<data key=\"type\">Workflow</data>");
|
||||
// null description 不输出
|
||||
assertThat(graphml).doesNotContain("<data key=\"description\">null</data>");
|
||||
assertThat(graphml).contains("<edge id=\"edge-1\" source=\"node-1\" target=\"node-2\">");
|
||||
assertThat(graphml).contains("<data key=\"relationType\">DERIVED_FROM</data>");
|
||||
assertThat(graphml).contains("<data key=\"weight\">0.8</data>");
|
||||
}
|
||||
|
||||
@Test
|
||||
void convertToGraphML_specialCharactersEscaped() {
|
||||
SubgraphExportVO export = SubgraphExportVO.builder()
|
||||
.nodes(List.of(
|
||||
com.datamate.knowledgegraph.interfaces.dto.ExportNodeVO.builder()
|
||||
.id("node-1").name("A & B <Corp>").type("Org")
|
||||
.description("\"Test\" org").properties(Map.of())
|
||||
.build()
|
||||
))
|
||||
.edges(List.of())
|
||||
.nodeCount(1)
|
||||
.edgeCount(0)
|
||||
.build();
|
||||
|
||||
String graphml = queryService.convertToGraphML(export);
|
||||
|
||||
assertThat(graphml).contains("A & B <Corp>");
|
||||
assertThat(graphml).contains(""Test" org");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,278 @@
|
||||
package com.datamate.knowledgegraph.application;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.knowledgegraph.domain.model.GraphEntity;
|
||||
import com.datamate.knowledgegraph.domain.model.RelationDetail;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphEntityRepository;
|
||||
import com.datamate.knowledgegraph.domain.repository.GraphRelationRepository;
|
||||
import com.datamate.knowledgegraph.infrastructure.cache.GraphCacheService;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.CreateRelationRequest;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.RelationVO;
|
||||
import com.datamate.knowledgegraph.interfaces.dto.UpdateRelationRequest;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class GraphRelationServiceTest {
|
||||
|
||||
private static final String GRAPH_ID = "550e8400-e29b-41d4-a716-446655440000";
|
||||
private static final String RELATION_ID = "770e8400-e29b-41d4-a716-446655440002";
|
||||
private static final String SOURCE_ENTITY_ID = "660e8400-e29b-41d4-a716-446655440001";
|
||||
private static final String TARGET_ENTITY_ID = "660e8400-e29b-41d4-a716-446655440003";
|
||||
private static final String INVALID_GRAPH_ID = "not-a-uuid";
|
||||
|
||||
@Mock
|
||||
private GraphRelationRepository relationRepository;
|
||||
|
||||
@Mock
|
||||
private GraphEntityRepository entityRepository;
|
||||
|
||||
@Mock
|
||||
private GraphCacheService cacheService;
|
||||
|
||||
@InjectMocks
|
||||
private GraphRelationService relationService;
|
||||
|
||||
private RelationDetail sampleDetail;
|
||||
private GraphEntity sourceEntity;
|
||||
private GraphEntity targetEntity;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
sampleDetail = RelationDetail.builder()
|
||||
.id(RELATION_ID)
|
||||
.sourceEntityId(SOURCE_ENTITY_ID)
|
||||
.sourceEntityName("Source")
|
||||
.sourceEntityType("Dataset")
|
||||
.targetEntityId(TARGET_ENTITY_ID)
|
||||
.targetEntityName("Target")
|
||||
.targetEntityType("Field")
|
||||
.relationType("HAS_FIELD")
|
||||
.properties(Map.of())
|
||||
.weight(1.0)
|
||||
.confidence(1.0)
|
||||
.graphId(GRAPH_ID)
|
||||
.createdAt(LocalDateTime.now())
|
||||
.build();
|
||||
|
||||
sourceEntity = GraphEntity.builder()
|
||||
.id(SOURCE_ENTITY_ID).name("Source").type("Dataset").graphId(GRAPH_ID).build();
|
||||
targetEntity = GraphEntity.builder()
|
||||
.id(TARGET_ENTITY_ID).name("Target").type("Field").graphId(GRAPH_ID).build();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// graphId 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getRelation_invalidGraphId_throwsBusinessException() {
|
||||
assertThatThrownBy(() -> relationService.getRelation(INVALID_GRAPH_ID, RELATION_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// createRelation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void createRelation_success() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(TARGET_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(targetEntity));
|
||||
when(relationRepository.create(eq(GRAPH_ID), eq(SOURCE_ENTITY_ID), eq(TARGET_ENTITY_ID),
|
||||
eq("HAS_FIELD"), anyMap(), isNull(), isNull(), isNull()))
|
||||
.thenReturn(Optional.of(sampleDetail));
|
||||
|
||||
CreateRelationRequest request = new CreateRelationRequest();
|
||||
request.setSourceEntityId(SOURCE_ENTITY_ID);
|
||||
request.setTargetEntityId(TARGET_ENTITY_ID);
|
||||
request.setRelationType("HAS_FIELD");
|
||||
|
||||
RelationVO result = relationService.createRelation(GRAPH_ID, request);
|
||||
|
||||
assertThat(result.getId()).isEqualTo(RELATION_ID);
|
||||
assertThat(result.getRelationType()).isEqualTo("HAS_FIELD");
|
||||
assertThat(result.getSourceEntityId()).isEqualTo(SOURCE_ENTITY_ID);
|
||||
assertThat(result.getTargetEntityId()).isEqualTo(TARGET_ENTITY_ID);
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, SOURCE_ENTITY_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void createRelation_sourceNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
CreateRelationRequest request = new CreateRelationRequest();
|
||||
request.setSourceEntityId(SOURCE_ENTITY_ID);
|
||||
request.setTargetEntityId(TARGET_ENTITY_ID);
|
||||
request.setRelationType("HAS_FIELD");
|
||||
|
||||
assertThatThrownBy(() -> relationService.createRelation(GRAPH_ID, request))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void createRelation_targetNotFound_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(entityRepository.findByIdAndGraphId(TARGET_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
CreateRelationRequest request = new CreateRelationRequest();
|
||||
request.setSourceEntityId(SOURCE_ENTITY_ID);
|
||||
request.setTargetEntityId(TARGET_ENTITY_ID);
|
||||
request.setRelationType("HAS_FIELD");
|
||||
|
||||
assertThatThrownBy(() -> relationService.createRelation(GRAPH_ID, request))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// getRelation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void getRelation_found() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleDetail));
|
||||
|
||||
RelationVO result = relationService.getRelation(GRAPH_ID, RELATION_ID);
|
||||
|
||||
assertThat(result.getId()).isEqualTo(RELATION_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getRelation_notFound_throwsBusinessException() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> relationService.getRelation(GRAPH_ID, RELATION_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listRelations (分页)
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listRelations_returnsPaged() {
|
||||
when(relationRepository.findByGraphId(GRAPH_ID, null, 0L, 20))
|
||||
.thenReturn(List.of(sampleDetail));
|
||||
when(relationRepository.countByGraphId(GRAPH_ID, null))
|
||||
.thenReturn(1L);
|
||||
|
||||
var result = relationService.listRelations(GRAPH_ID, null, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
assertThat(result.getTotalElements()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listRelations_oversizedPage_clampedTo200() {
|
||||
when(relationRepository.findByGraphId(GRAPH_ID, null, 0L, 200))
|
||||
.thenReturn(List.of());
|
||||
when(relationRepository.countByGraphId(GRAPH_ID, null))
|
||||
.thenReturn(0L);
|
||||
|
||||
relationService.listRelations(GRAPH_ID, null, 0, 999);
|
||||
|
||||
verify(relationRepository).findByGraphId(GRAPH_ID, null, 0L, 200);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// listEntityRelations — direction 校验
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void listEntityRelations_invalidDirection_throwsBusinessException() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
|
||||
assertThatThrownBy(() ->
|
||||
relationService.listEntityRelations(GRAPH_ID, SOURCE_ENTITY_ID, "invalid", null, 0, 20))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void listEntityRelations_inDirection() {
|
||||
when(entityRepository.findByIdAndGraphId(SOURCE_ENTITY_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sourceEntity));
|
||||
when(relationRepository.findInboundByEntityId(GRAPH_ID, SOURCE_ENTITY_ID, null, 0L, 20))
|
||||
.thenReturn(List.of(sampleDetail));
|
||||
when(relationRepository.countByEntityId(GRAPH_ID, SOURCE_ENTITY_ID, null, "in"))
|
||||
.thenReturn(1L);
|
||||
|
||||
var result = relationService.listEntityRelations(
|
||||
GRAPH_ID, SOURCE_ENTITY_ID, "in", null, 0, 20);
|
||||
|
||||
assertThat(result.getContent()).hasSize(1);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// updateRelation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void updateRelation_success() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleDetail));
|
||||
RelationDetail updated = RelationDetail.builder()
|
||||
.id(RELATION_ID).relationType("USES").weight(0.8)
|
||||
.sourceEntityId(SOURCE_ENTITY_ID).targetEntityId(TARGET_ENTITY_ID)
|
||||
.graphId(GRAPH_ID).build();
|
||||
when(relationRepository.update(eq(RELATION_ID), eq(GRAPH_ID), eq("USES"), isNull(), eq(0.8), isNull()))
|
||||
.thenReturn(Optional.of(updated));
|
||||
|
||||
UpdateRelationRequest request = new UpdateRelationRequest();
|
||||
request.setRelationType("USES");
|
||||
request.setWeight(0.8);
|
||||
|
||||
RelationVO result = relationService.updateRelation(GRAPH_ID, RELATION_ID, request);
|
||||
|
||||
assertThat(result.getRelationType()).isEqualTo("USES");
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, SOURCE_ENTITY_ID);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// deleteRelation
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
void deleteRelation_success() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.of(sampleDetail));
|
||||
when(relationRepository.deleteByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(1L);
|
||||
|
||||
relationService.deleteRelation(GRAPH_ID, RELATION_ID);
|
||||
|
||||
verify(relationRepository).deleteByIdAndGraphId(RELATION_ID, GRAPH_ID);
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, SOURCE_ENTITY_ID);
|
||||
verify(cacheService).evictEntityCaches(GRAPH_ID, TARGET_ENTITY_ID);
|
||||
}
|
||||
|
||||
@Test
|
||||
void deleteRelation_notFound_throwsBusinessException() {
|
||||
when(relationRepository.findByIdAndGraphId(RELATION_ID, GRAPH_ID))
|
||||
.thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> relationService.deleteRelation(GRAPH_ID, RELATION_ID))
|
||||
.isInstanceOf(BusinessException.class);
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user