From a69b9f4921ec1d31cb276bd009a2ca112b66b53b Mon Sep 17 00:00:00 2001 From: hhhhsc <1710496817@qq.com> Date: Tue, 28 Oct 2025 10:52:27 +0800 Subject: [PATCH 1/2] =?UTF-8?q?feature:=20=E5=AF=B9=E6=8E=A5deer-flow?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Makefile | 42 ++++++-- backend/pom.xml | 11 ++- .../services/data-annotation-service/pom.xml | 4 +- .../services/data-cleaning-service/pom.xml | 4 +- .../services/data-collection-service/pom.xml | 4 +- .../services/data-evaluation-service/pom.xml | 4 +- .../services/data-management-service/pom.xml | 4 +- .../services/data-synthesis-service/pom.xml | 4 +- .../services/execution-engine-service/pom.xml | 4 +- backend/services/main-application/pom.xml | 4 +- .../services/operator-market-service/pom.xml | 4 +- .../pipeline-orchestration-service/pom.xml | 4 +- backend/services/rag-indexer-service/pom.xml | 4 +- backend/services/rag-query-service/pom.xml | 4 +- .../docker/datamate/.env.deer-flow.example | 1 + deployment/docker/datamate/.env.example | 1 + .../datamate/backend-with-deer-flow.conf | 49 +++++++++ .../docker/datamate}/backend.conf | 0 deployment/docker/datamate/docker-compose.yml | 7 +- deployment/docker/deer-flow/.env.example | 99 +++++++++++++++++++ deployment/docker/deer-flow/conf.yaml.example | 71 +++++++++++++ .../docker/deer-flow/docker-compose.yml | 28 ++++++ frontend/src/pages/Home/Home.tsx | 4 +- frontend/src/routes/routes.ts | 2 +- .../label-studio-adapter/app/core/config.py | 48 ++++----- scripts/images/backend/Dockerfile | 2 + scripts/images/frontend/Dockerfile | 6 +- scripts/images/runtime/Dockerfile | 8 +- 28 files changed, 354 insertions(+), 73 deletions(-) create mode 100644 deployment/docker/datamate/.env.deer-flow.example create mode 100644 deployment/docker/datamate/.env.example create mode 100644 deployment/docker/datamate/backend-with-deer-flow.conf rename {scripts/images/frontend => deployment/docker/datamate}/backend.conf (100%) create 
mode 100644 deployment/docker/deer-flow/.env.example create mode 100644 deployment/docker/deer-flow/conf.yaml.example create mode 100644 deployment/docker/deer-flow/docker-compose.yml diff --git a/Makefile b/Makefile index c0dcc8d..3a33206 100644 --- a/Makefile +++ b/Makefile @@ -12,7 +12,7 @@ build: backend-docker-build frontend-docker-build runtime-docker-build .PHONY: create-namespace create-namespace: - @kubectl get namespace $(NAMESPACE) > /dev/null 2>&1 || kubectl create namespace $(NAMESPACE) + kubectl get namespace $(NAMESPACE) > /dev/null 2>&1 || kubectl create namespace $(NAMESPACE) .PHONY: install-% install-%: @@ -73,37 +73,61 @@ runtime-docker-build: label-studio-adapter-docker-build: docker build -t label-studio-adapter:$(VERSION) . -f scripts/images/label-studio-adapter/Dockerfile +.PHONY: deer-flow-docker-build +deer-flow-docker-build: + @if [ -d "../deer-flow/.git" ]; then \ + cd ../deer-flow && git pull; \ + else \ + git clone git@github.com:bytedance/deer-flow.git ../deer-flow; \ + fi + sed -i "s/dark/light/g" "../deer-flow/web/src/components/deer-flow/theme-provider-wrapper.tsx" + cp deployment/docker/deer-flow/.env.example ../deer-flow/.env + cp deployment/docker/deer-flow/conf.yaml.example ../deer-flow/conf.yaml + cd ../deer-flow && docker compose build + .PHONY: backend-docker-install backend-docker-install: - cd deployment/docker/datamate && docker-compose up -d backend + cd deployment/docker/datamate && docker compose up -d backend .PHONY: backend-docker-uninstall backend-docker-uninstall: - cd deployment/docker/datamate && docker-compose down backend + cd deployment/docker/datamate && docker compose down backend .PHONY: frontend-docker-install frontend-docker-install: - cd deployment/docker/datamate && docker-compose up -d frontend + cd deployment/docker/datamate && docker compose up -d frontend .PHONY: frontend-docker-uninstall frontend-docker-uninstall: - cd deployment/docker/datamate && docker-compose down frontend + cd 
deployment/docker/datamate && docker compose down frontend .PHONY: runtime-docker-install runtime-docker-install: - cd deployment/docker/datamate && docker-compose up -d runtime + cd deployment/docker/datamate && docker compose up -d runtime .PHONY: runtime-docker-uninstall runtime-docker-uninstall: - cd deployment/docker/datamate && docker-compose down runtime + cd deployment/docker/datamate && docker compose down runtime .PHONY: datamate-docker-install datamate-docker-install: - cd deployment/docker/datamate && docker-compose up -d + cd deployment/docker/datamate && cp .env.example .env && docker compose -f docker-compose.yml up -d .PHONY: datamate-docker-uninstall datamate-docker-uninstall: - cd deployment/docker/datamate && docker-compose down + cd deployment/docker/datamate && docker compose -f docker-compose.yml down + +.PHONY: deer-flow-docker-install +deer-flow-docker-install: + cd deployment/docker/datamate && cp .env.deer-flow.example .env && docker compose -f docker-compose.yml up -d + cd deployment/docker/deer-flow && cp .env.example .env && cp conf.yaml.example conf.yaml && docker compose -f docker-compose.yml up -d + +.PHONY: deer-flow-docker-uninstall +deer-flow-docker-uninstall: + @if docker compose ls --filter name=datamate | grep -q datamate; then \ + cd deployment/docker/datamate && docker compose -f docker-compose.yml up -d; \ + fi + cd deployment/docker/deer-flow && docker compose -f docker-compose.yml down .PHONY: datamate-k8s-install datamate-k8s-install: create-namespace diff --git a/backend/pom.xml b/backend/pom.xml index 682eaa0..3b4d938 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -20,6 +20,7 @@ 3.5.6 2025.0.0 + 1.0.0-RC1 8.0.33 42.6.0 3.2.0 @@ -126,8 +127,8 @@ - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql.version} @@ -164,6 +165,12 @@ + + org.springframework.ai + spring-ai-starter-mcp-server-webmvc + ${spring-ai.version} + + com.baomidou mybatis-plus-spring-boot3-starter diff --git 
a/backend/services/data-annotation-service/pom.xml b/backend/services/data-annotation-service/pom.xml index a91d058..fd85cf9 100644 --- a/backend/services/data-annotation-service/pom.xml +++ b/backend/services/data-annotation-service/pom.xml @@ -31,8 +31,8 @@ spring-boot-starter-websocket - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql.version} diff --git a/backend/services/data-cleaning-service/pom.xml b/backend/services/data-cleaning-service/pom.xml index 56b70fc..b4ad5ab 100644 --- a/backend/services/data-cleaning-service/pom.xml +++ b/backend/services/data-cleaning-service/pom.xml @@ -53,8 +53,8 @@ mybatis-plus-spring-boot3-starter - mysql - mysql-connector-java + com.mysql + mysql-connector-j org.apache.commons diff --git a/backend/services/data-collection-service/pom.xml b/backend/services/data-collection-service/pom.xml index 6600bf1..39c233f 100644 --- a/backend/services/data-collection-service/pom.xml +++ b/backend/services/data-collection-service/pom.xml @@ -39,8 +39,8 @@ - mysql - mysql-connector-java + com.mysql + mysql-connector-j 8.0.33 runtime diff --git a/backend/services/data-evaluation-service/pom.xml b/backend/services/data-evaluation-service/pom.xml index c976d19..5a1928b 100644 --- a/backend/services/data-evaluation-service/pom.xml +++ b/backend/services/data-evaluation-service/pom.xml @@ -27,8 +27,8 @@ spring-boot-starter-web - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql.version} diff --git a/backend/services/data-management-service/pom.xml b/backend/services/data-management-service/pom.xml index e6f0c16..1882bfb 100644 --- a/backend/services/data-management-service/pom.xml +++ b/backend/services/data-management-service/pom.xml @@ -35,8 +35,8 @@ spring-boot-starter-data-redis - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql.version} diff --git a/backend/services/data-synthesis-service/pom.xml b/backend/services/data-synthesis-service/pom.xml index bc146a4..ae149e2 100644 --- 
a/backend/services/data-synthesis-service/pom.xml +++ b/backend/services/data-synthesis-service/pom.xml @@ -27,8 +27,8 @@ spring-boot-starter-web - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql.version} diff --git a/backend/services/execution-engine-service/pom.xml b/backend/services/execution-engine-service/pom.xml index 42f9484..b8aa6d7 100644 --- a/backend/services/execution-engine-service/pom.xml +++ b/backend/services/execution-engine-service/pom.xml @@ -31,8 +31,8 @@ spring-boot-starter-data-redis - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql.version} diff --git a/backend/services/main-application/pom.xml b/backend/services/main-application/pom.xml index 33e4862..158647b 100644 --- a/backend/services/main-application/pom.xml +++ b/backend/services/main-application/pom.xml @@ -118,8 +118,8 @@ - mysql - mysql-connector-java + com.mysql + mysql-connector-j 8.0.33 runtime diff --git a/backend/services/operator-market-service/pom.xml b/backend/services/operator-market-service/pom.xml index 6543a1e..34c8ece 100644 --- a/backend/services/operator-market-service/pom.xml +++ b/backend/services/operator-market-service/pom.xml @@ -31,8 +31,8 @@ spring-boot-starter-data-redis - mysql - mysql-connector-java + com.mysql + mysql-connector-j org.springframework.boot diff --git a/backend/services/pipeline-orchestration-service/pom.xml b/backend/services/pipeline-orchestration-service/pom.xml index d5dfe1d..b8b3a04 100644 --- a/backend/services/pipeline-orchestration-service/pom.xml +++ b/backend/services/pipeline-orchestration-service/pom.xml @@ -31,8 +31,8 @@ spring-boot-starter-data-redis - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql.version} diff --git a/backend/services/rag-indexer-service/pom.xml b/backend/services/rag-indexer-service/pom.xml index f9e0441..206ae74 100644 --- a/backend/services/rag-indexer-service/pom.xml +++ b/backend/services/rag-indexer-service/pom.xml @@ -31,8 +31,8 @@ 
spring-boot-starter-data-elasticsearch - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql.version} diff --git a/backend/services/rag-query-service/pom.xml b/backend/services/rag-query-service/pom.xml index cc407ef..9c4e29a 100644 --- a/backend/services/rag-query-service/pom.xml +++ b/backend/services/rag-query-service/pom.xml @@ -31,8 +31,8 @@ spring-boot-starter-data-elasticsearch - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql.version} diff --git a/deployment/docker/datamate/.env.deer-flow.example b/deployment/docker/datamate/.env.deer-flow.example new file mode 100644 index 0000000..40b3afe --- /dev/null +++ b/deployment/docker/datamate/.env.deer-flow.example @@ -0,0 +1 @@ +NGINX_CONF=./backend-with-deer-flow.conf diff --git a/deployment/docker/datamate/.env.example b/deployment/docker/datamate/.env.example new file mode 100644 index 0000000..5009927 --- /dev/null +++ b/deployment/docker/datamate/.env.example @@ -0,0 +1 @@ +NGINX_CONF=./backend.conf diff --git a/deployment/docker/datamate/backend-with-deer-flow.conf b/deployment/docker/datamate/backend-with-deer-flow.conf new file mode 100644 index 0000000..eff6d5c --- /dev/null +++ b/deployment/docker/datamate/backend-with-deer-flow.conf @@ -0,0 +1,49 @@ +server { + listen 80; + server_name 0.0.0.0; + + access_log /var/log/datamate/frontend/access.log main; + error_log /var/log/datamate/frontend/error.log notice; + + client_max_body_size 1024M; + + add_header Set-Cookie "NEXT_LOCALE=zh"; + + location /api/ { + proxy_pass http://datamate-backend:8080/api/; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + location /chat { + proxy_pass http://deer-flow-frontend:3000/chat; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + location /_next { + proxy_pass 
http://deer-flow-frontend:3000/_next; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + location /deer-flow-backend/ { + proxy_pass http://deer-flow-backend:8000/api/; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + location / { + if ($query_string ~* "_rsc=pmmii") { + proxy_pass http://deer-flow-frontend:3000; + break; + } + + root /opt/frontend; + try_files $uri $uri/ /index.html; + } +} diff --git a/scripts/images/frontend/backend.conf b/deployment/docker/datamate/backend.conf similarity index 100% rename from scripts/images/frontend/backend.conf rename to deployment/docker/datamate/backend.conf diff --git a/deployment/docker/datamate/docker-compose.yml b/deployment/docker/datamate/docker-compose.yml index 545e7ac..623fa7d 100644 --- a/deployment/docker/datamate/docker-compose.yml +++ b/deployment/docker/datamate/docker-compose.yml @@ -5,8 +5,6 @@ services: image: datamate-backend restart: on-failure privileged: true - ports: - - "8080" volumes: - dataset_volume:/dataset - flow_volume:/flow @@ -24,6 +22,7 @@ services: - "30000:80" # nodePort → hostPort volumes: - frontend_log_volume:/var/log/datamate/frontend + - $NGINX_CONF:/etc/nginx/conf.d/backend.conf networks: [ datamate ] depends_on: - datamate-backend @@ -35,8 +34,6 @@ services: restart: on-failure environment: MYSQL_ROOT_PASSWORD: password - ports: - - "3306" command: | sh -c " chown mysql:mysql /var/log/datamate/database && @@ -63,8 +60,6 @@ services: MYSQL_USER: "root" MYSQL_PASSWORD: "password" MYSQL_DATABASE: "datamate" - ports: - - "8081" command: - python - /opt/runtime/datamate/operator_runtime.py diff --git a/deployment/docker/deer-flow/.env.example b/deployment/docker/deer-flow/.env.example new file mode 100644 index 0000000..f1ee31b --- /dev/null +++ b/deployment/docker/deer-flow/.env.example @@ -0,0 +1,99 
@@ +# Application Settings +DEBUG=True +APP_ENV=development + +# docker build args +NEXT_PUBLIC_API_URL="http://localhost:30000/deer-flow-backend" + +AGENT_RECURSION_LIMIT=30 + +# CORS settings +# Comma-separated list of allowed origins for CORS requests +# Example: ALLOWED_ORIGINS=http://localhost:3000,http://example.com +ALLOWED_ORIGINS=http://localhost:3000 + +# Enable or disable MCP server configuration, the default is false. +# Please enable this feature before securing your front-end and back-end in a managed environment. +# Otherwise, your system could be compromised. +ENABLE_MCP_SERVER_CONFIGURATION=true + +# Enable or disable PYTHON_REPL configuration, the default is false. +# Please enable this feature before securing your system in a managed environment. +# Otherwise, your system could be compromised. +ENABLE_PYTHON_REPL=false + +# Search Engine, Supported values: tavily (recommended), duckduckgo, brave_search, arxiv, searx +SEARCH_API=tavily +TAVILY_API_KEY=tvly-xxx +# SEARX_HOST=xxx # Required only if SEARCH_API is searx.(compatible with both Searx and SearxNG) +# BRAVE_SEARCH_API_KEY=xxx # Required only if SEARCH_API is brave_search +# JINA_API_KEY=jina_xxx # Optional, default is None + +# Optional, RAG provider +# RAG_PROVIDER=vikingdb_knowledge_base +# VIKINGDB_KNOWLEDGE_BASE_API_URL="api-knowledgebase.mlp.cn-beijing.volces.com" +# VIKINGDB_KNOWLEDGE_BASE_API_AK="AKxxx" +# VIKINGDB_KNOWLEDGE_BASE_API_SK="" +# VIKINGDB_KNOWLEDGE_BASE_RETRIEVAL_SIZE=15 + +# RAG_PROVIDER=ragflow +# RAGFLOW_API_URL="http://localhost:9388" +# RAGFLOW_API_KEY="ragflow-xxx" +# RAGFLOW_RETRIEVAL_SIZE=10 +# RAGFLOW_CROSS_LANGUAGES=English,Chinese,Spanish,French,German,Japanese,Korean # Optional.
To use RAGFlow's cross-language search, please separate each language with a single comma + +# RAG_PROVIDER=dify +# DIFY_API_URL="https://api.dify.ai/v1" +# DIFY_API_KEY="dataset-xxx" + +# MOI is a hybrid database that mainly serves enterprise users (https://www.matrixorigin.io/matrixone-intelligence) +# RAG_PROVIDER=moi +# MOI_API_URL="https://cluster.matrixonecloud.cn" +# MOI_API_KEY="xxx-xxx-xxx-xxx" +# MOI_RETRIEVAL_SIZE=10 +# MOI_LIST_LIMIT=10 + + +# RAG_PROVIDER: milvus (using free milvus instance on zilliz cloud: https://docs.zilliz.com/docs/quick-start ) +# RAG_PROVIDER=milvus +# MILVUS_URI= +# MILVUS_USER= +# MILVUS_PASSWORD= +# MILVUS_COLLECTION=documents +# MILVUS_EMBEDDING_PROVIDER=openai # support openai,dashscope +# MILVUS_EMBEDDING_BASE_URL= +# MILVUS_EMBEDDING_MODEL= +# MILVUS_EMBEDDING_API_KEY= +# MILVUS_AUTO_LOAD_EXAMPLES=true + +# RAG_PROVIDER: milvus (using milvus lite on Mac or Linux) +# RAG_PROVIDER=milvus +# MILVUS_URI=./milvus_demo.db +# MILVUS_COLLECTION=documents +# MILVUS_EMBEDDING_PROVIDER=openai # support openai,dashscope +# MILVUS_EMBEDDING_BASE_URL= +# MILVUS_EMBEDDING_MODEL= +# MILVUS_EMBEDDING_API_KEY= +# MILVUS_AUTO_LOAD_EXAMPLES=true + +# Optional, volcengine TTS for generating podcast +VOLCENGINE_TTS_APPID=xxx +VOLCENGINE_TTS_ACCESS_TOKEN=xxx +# VOLCENGINE_TTS_CLUSTER=volcano_tts # Optional, default is volcano_tts +# VOLCENGINE_TTS_VOICE_TYPE=BV700_V2_streaming # Optional, default is BV700_V2_streaming + +# Option, for langsmith tracing and monitoring +# LANGSMITH_TRACING=true +# LANGSMITH_ENDPOINT="https://api.smith.langchain.com" +# LANGSMITH_API_KEY="xxx" +# LANGSMITH_PROJECT="xxx" + +# [!NOTE] +# For model settings and other configurations, please refer to `docs/configuration_guide.md` + +# Option, for langgraph mongodb checkpointer +# Enable LangGraph checkpoint saver, supports MongoDB, Postgres +#LANGGRAPH_CHECKPOINT_SAVER=true +# Set the database URL for saving checkpoints 
+#LANGGRAPH_CHECKPOINT_DB_URL=mongodb://localhost:27017/ +#LANGGRAPH_CHECKPOINT_DB_URL=postgresql://localhost:5432/postgres diff --git a/deployment/docker/deer-flow/conf.yaml.example b/deployment/docker/deer-flow/conf.yaml.example new file mode 100644 index 0000000..7e6863d --- /dev/null +++ b/deployment/docker/deer-flow/conf.yaml.example @@ -0,0 +1,71 @@ +# [!NOTE] +# Read the `docs/configuration_guide.md` carefully, and update the +# configurations to match your specific settings and requirements. +# - Replace `api_key` with your own credentials. +# - Replace `base_url` and `model` name if you want to use a custom model. +# - Set `verify_ssl` to `false` if your LLM server uses self-signed certificates +# - A restart is required every time you change the `conf.yaml` file. + +BASIC_MODEL: + base_url: https://ark.cn-beijing.volces.com/api/v3 + model: "doubao-1-5-pro-32k-250115" + api_key: xxxx + # max_retries: 3 # Maximum number of retries for LLM calls + # verify_ssl: false # Uncomment this line to disable SSL certificate verification for self-signed certificates + + # Local model configuration example: + + # Ollama (Tested and supported for local development) + # BASIC_MODEL: + # base_url: "http://localhost:11434/v1" # Ollama OpenAI compatible endpoint + # model: "qwen3:14b" # or "llama3.2", etc. + # api_key: "ollama" # Ollama doesn't need real API key + # max_retries: 3 + # verify_ssl: false # Local deployment usually doesn't need SSL verification + + # To use Google Ai Studio as your basic platform: + # BASIC_MODEL: + # platform: "google_aistudio" + # model: "gemini-2.5-flash" # or "gemini-1.5-pro", "gemini-2.5-flash-exp", etc. + # api_key: your_gemini_api_key # Get from https://aistudio.google.com/app/apikey + # max_retries: 3 + +# Reasoning model is optional. +# Uncomment the following settings if you want to use reasoning model +# for planning. 
+ +# REASONING_MODEL: +# base_url: https://ark.cn-beijing.volces.com/api/v3 +# model: "doubao-1-5-thinking-pro-m-250428" +# api_key: xxxx +# max_retries: 3 # Maximum number of retries for LLM calls + + +# OTHER SETTINGS: +# Search engine configuration (Only supports Tavily currently) +# SEARCH_ENGINE: +# engine: tavily +# # Only include results from these domains +# include_domains: +# - example.com +# - trusted-news.com +# - reliable-source.org +# - gov.cn +# - edu.cn +# # Exclude results from these domains +# exclude_domains: +# - example.com +# # Include an answer in the search results +# include_answer: false +# # Search depth: "basic" or "advanced" +# search_depth: "advanced" +# # Include raw content from pages +# include_raw_content: true +# # Include images in search results +# include_images: true +# # Include descriptions for images +# include_image_descriptions: true +# # Minimum score threshold for results (0-1) +# min_score_threshold: 0.0 +# # Maximum content length per page +# max_content_length_per_page: 4000 diff --git a/deployment/docker/deer-flow/docker-compose.yml b/deployment/docker/deer-flow/docker-compose.yml new file mode 100644 index 0000000..00069fa --- /dev/null +++ b/deployment/docker/deer-flow/docker-compose.yml @@ -0,0 +1,28 @@ +services: + deer-flow-backend: + image: deer-flow-backend + container_name: deer-flow-backend + env_file: + - .env + volumes: + - ./conf.yaml:/app/conf.yaml:ro + restart: unless-stopped + networks: + - datamate + + deer-flow-frontend: + image: deer-flow-frontend + container_name: deer-flow-frontend + env_file: + - .env + depends_on: + - deer-flow-backend + restart: unless-stopped + networks: + - datamate + +networks: + datamate: + driver: bridge + name: datamate_datamate + external: true diff --git a/frontend/src/pages/Home/Home.tsx b/frontend/src/pages/Home/Home.tsx index 0858766..c9aff62 100644 --- a/frontend/src/pages/Home/Home.tsx +++ b/frontend/src/pages/Home/Home.tsx @@ -41,7 +41,7 @@ export default 
function WelcomePage() { 开始使用 navigate("/agent")} + onClick={() => navigate("/chat")} className="cursor-pointer rounded px-4 py-2 inline-flex items-center bg-gradient-to-r from-purple-600 to-pink-600 hover:from-purple-700 hover:to-pink-700 text-white shadow-lg" > @@ -233,7 +233,7 @@ export default function WelcomePage() {
navigate("/agent")} + onClick={() => navigate("/chat")} className="cursor-pointer rounded px-4 py-2 inline-flex items-center bg-gradient-to-r from-purple-600 to-pink-600 hover:from-purple-700 hover:to-pink-700 text-white shadow-lg" > diff --git a/frontend/src/routes/routes.ts b/frontend/src/routes/routes.ts index 763abf6..cb867c7 100644 --- a/frontend/src/routes/routes.ts +++ b/frontend/src/routes/routes.ts @@ -53,7 +53,7 @@ const router = createBrowserRouter([ Component: withErrorBoundary(Home), }, { - path: "/agent", + path: "/chat", Component: withErrorBoundary(AgentPage), }, { diff --git a/runtime/label-studio-adapter/app/core/config.py b/runtime/label-studio-adapter/app/core/config.py index d6cfe5c..09874e1 100644 --- a/runtime/label-studio-adapter/app/core/config.py +++ b/runtime/label-studio-adapter/app/core/config.py @@ -10,7 +10,7 @@ class Settings(BaseSettings): env_file = ".env" case_sensitive = False extra = 'ignore' # 允许额外字段(如 Shell 脚本专用的环境变量) - + # ========================= # Adapter 服务配置 # ========================= @@ -18,7 +18,7 @@ class Settings(BaseSettings): app_version: str = "1.0.0" app_description: str = "Adapter for integrating Data Management System with Label Studio" debug: bool = True - + # 服务器配置 host: str = "0.0.0.0" port: int = 8000 @@ -34,27 +34,27 @@ class Settings(BaseSettings): mysql_user: Optional[str] = None mysql_password: Optional[str] = None mysql_database: Optional[str] = None - + # PostgreSQL数据库配置 (优先级2) postgres_host: Optional[str] = None postgres_port: int = 5432 postgres_user: Optional[str] = None postgres_password: Optional[str] = None postgres_database: Optional[str] = None - + # SQLite数据库配置 (优先级3 - 兜底) sqlite_path: str = "data/labelstudio_adapter.db" - + # 直接数据库URL配置(如果提供,将覆盖上述配置) database_url: Optional[str] = None - + # 日志配置 log_level: str = "INFO" - + # 安全配置 secret_key: str = "your-secret-key-change-this-in-production" access_token_expire_minutes: int = 30 - + # ========================= # Label Studio 服务配置 # 
========================= @@ -75,7 +75,7 @@ class Settings(BaseSettings): # ========================= dm_service_base_url: str = "http://data-engine" dm_file_path_prefix: str = "/" # DM存储文件夹前缀 - + @property def computed_database_url(self) -> str: @@ -86,61 +86,61 @@ class Settings(BaseSettings): # 如果直接提供了database_url,优先使用 if self.database_url: return self.database_url - + # 优先级1: MySQL if all([self.mysql_host, self.mysql_user, self.mysql_password, self.mysql_database]): return f"mysql+aiomysql://{self.mysql_user}:{self.mysql_password}@{self.mysql_host}:{self.mysql_port}/{self.mysql_database}" - + # 优先级2: PostgreSQL if all([self.postgres_host, self.postgres_user, self.postgres_password, self.postgres_database]): return f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}@{self.postgres_host}:{self.postgres_port}/{self.postgres_database}" - + # 优先级3: SQLite (兜底) sqlite_full_path = Path(self.sqlite_path).absolute() # 确保目录存在 sqlite_full_path.parent.mkdir(parents=True, exist_ok=True) return f"sqlite+aiosqlite:///{sqlite_full_path}" - - @property + + @property def sync_database_url(self) -> str: """ 用于数据库迁移的同步连接URL 将异步驱动替换为同步驱动 """ async_url = self.computed_database_url - + # 替换异步驱动为同步驱动 sync_replacements = { "mysql+aiomysql://": "mysql+pymysql://", - "postgresql+asyncpg://": "postgresql+psycopg2://", + "postgresql+asyncpg://": "postgresql+psycopg2://", "sqlite+aiosqlite:///": "sqlite:///" } - + for async_driver, sync_driver in sync_replacements.items(): if async_url.startswith(async_driver): return async_url.replace(async_driver, sync_driver) - + return async_url - + def get_database_info(self) -> dict: """获取数据库配置信息""" url = self.computed_database_url - + if url.startswith("mysql"): db_type = "MySQL" elif url.startswith("postgresql"): - db_type = "PostgreSQL" + db_type = "PostgreSQL" elif url.startswith("sqlite"): db_type = "SQLite" else: db_type = "Unknown" - + return { "type": db_type, "url": url, "sync_url": self.sync_database_url } - + # 全局设置实例 
-settings = Settings() \ No newline at end of file +settings = Settings() diff --git a/scripts/images/backend/Dockerfile b/scripts/images/backend/Dockerfile index ea898e7..8bfaff7 100644 --- a/scripts/images/backend/Dockerfile +++ b/scripts/images/backend/Dockerfile @@ -41,6 +41,8 @@ RUN dos2unix /opt/backend/start.sh \ && chmod +x /opt/backend/start.sh \ && ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime +EXPOSE 8080 + ENTRYPOINT ["/opt/backend/start.sh"] CMD ["java", "-Duser.timezone=Asia/Shanghai", "-jar", "/opt/backend/data-mate.jar"] diff --git a/scripts/images/frontend/Dockerfile b/scripts/images/frontend/Dockerfile index a00267d..d29fa59 100644 --- a/scripts/images/frontend/Dockerfile +++ b/scripts/images/frontend/Dockerfile @@ -10,8 +10,10 @@ RUN if [ -f package-lock.json ]; then npm ci; else npm install; fi && \ FROM nginx:1.29 AS runner COPY --from=builder /app/dist /opt/frontend -COPY scripts/images/frontend/backend.conf /etc/nginx/conf.d/default.conf -RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime +RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ + && rm -f /etc/nginx/conf.d/default.conf + +EXPOSE 80 CMD ["nginx", "-g", "daemon off;"] diff --git a/scripts/images/runtime/Dockerfile b/scripts/images/runtime/Dockerfile index 6d3f8ef..693c7e9 100644 --- a/scripts/images/runtime/Dockerfile +++ b/scripts/images/runtime/Dockerfile @@ -5,7 +5,7 @@ COPY runtime/ops /opt/runtime/datamate/ops ENV PYTHONPATH=/opt/runtime/datamate/ -RUN sed -i 's/deb.debian.org/mirrors.huaweicloud.com/g' /etc/apt/sources.list.d/debian.sources \ +RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list.d/debian.sources \ && apt update \ && apt install -y libgl1 libglib2.0-0 vim poppler-utils tesseract-ocr tesseract-ocr-chi-sim libmagic1t64 libreoffice\ && apt clean \ @@ -15,8 +15,10 @@ WORKDIR /opt/runtime ENV HF_HUB_DISABLE_XET=1 -RUN pip install -e . 
-i https://mirrors.huaweicloud.com/repository/pypi/simple \ - && pip install -r /opt/runtime/datamate/ops/requirements.txt -i https://mirrors.huaweicloud.com/repository/pypi/simple \ +RUN pip install -e . -i https://mirrors.aliyun.com/pypi/simple/ \ + && pip install -r /opt/runtime/datamate/ops/requirements.txt -i https://mirrors.aliyun.com/pypi/simple/ \ && pip cache purge RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime + +EXPOSE 8081 From 4f5a9a9a83ed37ddcd4dca5268faf514ff6b2de2 Mon Sep 17 00:00:00 2001 From: hhhhsc <1710496817@qq.com> Date: Tue, 28 Oct 2025 16:24:40 +0800 Subject: [PATCH 2/2] refactor: simplify Dockerfile by removing redundant mirror configurations and cleaning up package installation commands --- scripts/images/backend/Dockerfile | 7 ++----- scripts/images/runtime/Dockerfile | 7 +++---- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/scripts/images/backend/Dockerfile b/scripts/images/backend/Dockerfile index 8bfaff7..b06aad2 100644 --- a/scripts/images/backend/Dockerfile +++ b/scripts/images/backend/Dockerfile @@ -1,8 +1,6 @@ FROM maven:3-openjdk-8-slim AS datax-builder -RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list && \ - sed -i 's/security.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list && \ - apt-get update && \ +RUN apt-get update && \ apt-get install -y git && \ git clone https://github.com/alibaba/DataX.git @@ -24,8 +22,7 @@ RUN cd /opt/backend && \ FROM openjdk:21-jdk-slim -RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list.d/debian.sources && \ - apt-get update && \ +RUN apt-get update && \ apt-get install -y vim wget curl nfs-common rsync python3 python3-pip python-is-python3 dos2unix && \ apt-get clean && \ rm -rf /var/lib/apy/lists/* diff --git a/scripts/images/runtime/Dockerfile b/scripts/images/runtime/Dockerfile index 693c7e9..a9171e7 100644 --- a/scripts/images/runtime/Dockerfile +++ b/scripts/images/runtime/Dockerfile @@ -5,8 +5,7 @@ COPY 
runtime/ops /opt/runtime/datamate/ops ENV PYTHONPATH=/opt/runtime/datamate/ -RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list.d/debian.sources \ - && apt update \ +RUN apt update \ && apt install -y libgl1 libglib2.0-0 vim poppler-utils tesseract-ocr tesseract-ocr-chi-sim libmagic1t64 libreoffice\ && apt clean \ && rm -rf /var/lib/apt/lists/* @@ -15,8 +14,8 @@ WORKDIR /opt/runtime ENV HF_HUB_DISABLE_XET=1 -RUN pip install -e . -i https://mirrors.aliyun.com/pypi/simple/ \ - && pip install -r /opt/runtime/datamate/ops/requirements.txt -i https://mirrors.aliyun.com/pypi/simple/ \ +RUN pip install -e . \ + && pip install -r /opt/runtime/datamate/ops/requirements.txt \ && pip cache purge RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime