Makefile
@@ -12,7 +12,7 @@ build: backend-docker-build frontend-docker-build runtime-docker-build
 .PHONY: create-namespace
 create-namespace:
-	@kubectl get namespace $(NAMESPACE) > /dev/null 2>&1 || kubectl create namespace $(NAMESPACE)
+	kubectl get namespace $(NAMESPACE) > /dev/null 2>&1 || kubectl create namespace $(NAMESPACE)

 .PHONY: install-%
 install-%:
@@ -73,37 +73,61 @@ runtime-docker-build:
 label-studio-adapter-docker-build:
 	docker build -t label-studio-adapter:$(VERSION) . -f scripts/images/label-studio-adapter/Dockerfile

+.PHONY: deer-flow-docker-build
+deer-flow-docker-build:
+	@if [ -d "../deer-flow/.git" ]; then \
+		cd ../deer-flow && git pull; \
+	else \
+		git clone git@github.com:bytedance/deer-flow.git ../deer-flow; \
+	fi
+	sed -i "s/dark/light/g" "../deer-flow/web/src/components/deer-flow/theme-provider-wrapper.tsx"
+	cp deployment/docker/deer-flow/.env.example ../deer-flow/.env
+	cp deployment/docker/deer-flow/conf.yaml.example ../deer-flow/conf.yaml
+	cd ../deer-flow && docker compose build
+
 .PHONY: backend-docker-install
 backend-docker-install:
-	cd deployment/docker/datamate && docker-compose up -d backend
+	cd deployment/docker/datamate && docker compose up -d backend

 .PHONY: backend-docker-uninstall
 backend-docker-uninstall:
-	cd deployment/docker/datamate && docker-compose down backend
+	cd deployment/docker/datamate && docker compose down backend

 .PHONY: frontend-docker-install
 frontend-docker-install:
-	cd deployment/docker/datamate && docker-compose up -d frontend
+	cd deployment/docker/datamate && docker compose up -d frontend

 .PHONY: frontend-docker-uninstall
 frontend-docker-uninstall:
-	cd deployment/docker/datamate && docker-compose down frontend
+	cd deployment/docker/datamate && docker compose down frontend

 .PHONY: runtime-docker-install
 runtime-docker-install:
-	cd deployment/docker/datamate && docker-compose up -d runtime
+	cd deployment/docker/datamate && docker compose up -d runtime

 .PHONY: runtime-docker-uninstall
 runtime-docker-uninstall:
-	cd deployment/docker/datamate && docker-compose down runtime
+	cd deployment/docker/datamate && docker compose down runtime

 .PHONY: datamate-docker-install
 datamate-docker-install:
-	cd deployment/docker/datamate && docker-compose up -d
+	cd deployment/docker/datamate && cp .env.example .env && docker compose -f docker-compose.yml up -d

 .PHONY: datamate-docker-uninstall
 datamate-docker-uninstall:
-	cd deployment/docker/datamate && docker-compose down
+	cd deployment/docker/datamate && docker compose -f docker-compose.yml down

+.PHONY: deer-flow-docker-install
+deer-flow-docker-install:
+	cd deployment/docker/datamate && cp .env.deer-flow.example .env && docker compose -f docker-compose.yml up -d
+	cd deployment/docker/deer-flow && cp .env.example .env && cp conf.yaml.example conf.yaml && docker compose -f docker-compose.yml up -d
+
+.PHONY: deer-flow-docker-uninstall
+deer-flow-docker-uninstall:
+	@if docker compose ls --filter name=datamate | grep -q datamate; then \
+		cd deployment/docker/datamate && docker compose -f docker-compose.yml up -d; \
+	fi
+	cd deployment/docker/deer-flow && docker compose -f docker-compose.yml down
+
 .PHONY: datamate-k8s-install
 datamate-k8s-install: create-namespace
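A plausible end-to-end flow with the new targets (a sketch; the target names come from this Makefile, but the ordering is an assumption rather than documented usage):

# build the DataMate images, clone and build deer-flow, then bring both stacks up
make build deer-flow-docker-build
make deer-flow-docker-install
# later, stop deer-flow (the DataMate stack is restarted if it is still running)
make deer-flow-docker-uninstall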
@@ -20,6 +20,7 @@
         <spring-boot.version>3.5.6</spring-boot.version>
         <spring-cloud.version>2025.0.0</spring-cloud.version>
+        <spring-ai.version>1.0.0-RC1</spring-ai.version>
         <mysql.version>8.0.33</mysql.version>
         <postgresql.version>42.6.0</postgresql.version>
         <redis.version>3.2.0</redis.version>
@@ -126,8 +127,8 @@
         </dependency>

         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>${mysql.version}</version>
         </dependency>
@@ -164,6 +165,12 @@
             </exclusions>
         </dependency>

+        <dependency>
+            <groupId>org.springframework.ai</groupId>
+            <artifactId>spring-ai-starter-mcp-server-webmvc</artifactId>
+            <version>${spring-ai.version}</version>
+        </dependency>
+
         <dependency>
             <groupId>com.baomidou</groupId>
             <artifactId>mybatis-plus-spring-boot3-starter</artifactId>
@@ -31,8 +31,8 @@
             <artifactId>spring-boot-starter-websocket</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>${mysql.version}</version>
         </dependency>
         <!-- OpenAPI Dependencies -->

@@ -53,8 +53,8 @@
             <artifactId>mybatis-plus-spring-boot3-starter</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
         </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>

@@ -39,8 +39,8 @@

         <!-- Database -->
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>8.0.33</version>
             <scope>runtime</scope>
         </dependency>

@@ -27,8 +27,8 @@
             <artifactId>spring-boot-starter-web</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>${mysql.version}</version>
         </dependency>
         <dependency>

@@ -35,8 +35,8 @@
             <artifactId>spring-boot-starter-data-redis</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>${mysql.version}</version>
         </dependency>
         <dependency>

@@ -27,8 +27,8 @@
             <artifactId>spring-boot-starter-web</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>${mysql.version}</version>
         </dependency>
         <dependency>

@@ -31,8 +31,8 @@
             <artifactId>spring-boot-starter-data-redis</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>${mysql.version}</version>
         </dependency>
         <dependency>

@@ -118,8 +118,8 @@

         <!-- Database -->
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>8.0.33</version>
             <scope>runtime</scope>
         </dependency>

@@ -31,8 +31,8 @@
             <artifactId>spring-boot-starter-data-redis</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
         </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>

@@ -31,8 +31,8 @@
             <artifactId>spring-boot-starter-data-redis</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>${mysql.version}</version>
         </dependency>
         <dependency>

@@ -31,8 +31,8 @@
             <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>${mysql.version}</version>
         </dependency>
         <dependency>

@@ -31,8 +31,8 @@
             <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
         </dependency>
         <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
             <version>${mysql.version}</version>
         </dependency>
         <dependency>
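Since the same mysql → com.mysql coordinate swap repeats in every module pom, a quick repo-wide check that nothing still resolves the legacy artifact could look like this (assumes Maven on PATH and a run from the repository root; not part of the change itself):

mvn -q dependency:tree -Dincludes=mysql:mysql-connector-java   # should print nothing
mvn -q dependency:tree -Dincludes=com.mysql:mysql-connector-j  # should list the new connector per module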
deployment/docker/datamate/.env.deer-flow.example (new file)
@@ -0,0 +1 @@
NGINX_CONF=./backend-with-deer-flow.conf

deployment/docker/datamate/.env.example (new file)
@@ -0,0 +1 @@
NGINX_CONF=./backend.conf
deployment/docker/datamate/backend-with-deer-flow.conf (new file)
@@ -0,0 +1,49 @@
server {
    listen 80;
    server_name 0.0.0.0;

    access_log /var/log/datamate/frontend/access.log main;
    error_log /var/log/datamate/frontend/error.log notice;

    client_max_body_size 1024M;

    add_header Set-Cookie "NEXT_LOCALE=zh";

    location /api/ {
        proxy_pass http://datamate-backend:8080/api/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    location /chat {
        proxy_pass http://deer-flow-frontend:3000/chat;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    location /_next {
        proxy_pass http://deer-flow-frontend:3000/_next;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    location /deer-flow-backend/ {
        proxy_pass http://deer-flow-backend:8000/api/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    location / {
        if ($query_string ~* "_rsc=pmmii") {
            proxy_pass http://deer-flow-frontend:3000;
            break;
        }

        root /opt/frontend;
        try_files $uri $uri/ /index.html;
    }
}
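Once the stack is up on the host port published for the frontend (30000 in the compose diff below), the proxy routes can be smoke-tested roughly like this; the bare prefixes are illustrative, the real endpoints live under them:

curl -i http://localhost:30000/api/
curl -i http://localhost:30000/chat
curl -i http://localhost:30000/deer-flow-backend/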
@@ -5,8 +5,6 @@ services:
     image: datamate-backend
     restart: on-failure
     privileged: true
-    ports:
-      - "8080"
     volumes:
       - dataset_volume:/dataset
       - flow_volume:/flow
@@ -24,6 +22,7 @@ services:
       - "30000:80" # nodePort → hostPort
     volumes:
       - frontend_log_volume:/var/log/datamate/frontend
+      - $NGINX_CONF:/etc/nginx/conf.d/backend.conf
     networks: [ datamate ]
     depends_on:
       - datamate-backend
@@ -35,8 +34,6 @@ services:
     restart: on-failure
     environment:
       MYSQL_ROOT_PASSWORD: password
-    ports:
-      - "3306"
     command: |
       sh -c "
       chown mysql:mysql /var/log/datamate/database &&
@@ -63,8 +60,6 @@ services:
       MYSQL_USER: "root"
       MYSQL_PASSWORD: "password"
       MYSQL_DATABASE: "datamate"
-    ports:
-      - "8081"
     command:
       - python
       - /opt/runtime/datamate/operator_runtime.py
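The new $NGINX_CONF volume entry is resolved from the .env file next to the compose file; one way to confirm which conf will actually be mounted (assuming Docker Compose v2) is:

cd deployment/docker/datamate
cp .env.deer-flow.example .env        # or .env.example for the plain setup
docker compose config | grep backend.conf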
deployment/docker/deer-flow/.env.example (new file)
@@ -0,0 +1,99 @@
# Application Settings
DEBUG=True
APP_ENV=development

# docker build args
NEXT_PUBLIC_API_URL="http://localhost:30000/deer-flow-backend"

AGENT_RECURSION_LIMIT=30

# CORS settings
# Comma-separated list of allowed origins for CORS requests
# Example: ALLOWED_ORIGINS=http://localhost:3000,http://example.com
ALLOWED_ORIGINS=http://localhost:3000

# Enable or disable MCP server configuration, the default is false.
# Please enable this feature before securing your front-end and back-end in a managed environment.
# Otherwise, you system could be compromised.
ENABLE_MCP_SERVER_CONFIGURATION=true

# Enable or disable PYTHON_REPL configuration, the default is false.
# Please enable this feature before securing your in a managed environment.
# Otherwise, you system could be compromised.
ENABLE_PYTHON_REPL=false

# Search Engine, Supported values: tavily (recommended), duckduckgo, brave_search, arxiv, searx
SEARCH_API=tavily
TAVILY_API_KEY=tvly-xxx
# SEARX_HOST=xxx # Required only if SEARCH_API is searx.(compatible with both Searx and SearxNG)
# BRAVE_SEARCH_API_KEY=xxx # Required only if SEARCH_API is brave_search
# JINA_API_KEY=jina_xxx # Optional, default is None

# Optional, RAG provider
# RAG_PROVIDER=vikingdb_knowledge_base
# VIKINGDB_KNOWLEDGE_BASE_API_URL="api-knowledgebase.mlp.cn-beijing.volces.com"
# VIKINGDB_KNOWLEDGE_BASE_API_AK="AKxxx"
# VIKINGDB_KNOWLEDGE_BASE_API_SK=""
# VIKINGDB_KNOWLEDGE_BASE_RETRIEVAL_SIZE=15

# RAG_PROVIDER=ragflow
# RAGFLOW_API_URL="http://localhost:9388"
# RAGFLOW_API_KEY="ragflow-xxx"
# RAGFLOW_RETRIEVAL_SIZE=10
# RAGFLOW_CROSS_LANGUAGES=English,Chinese,Spanish,French,German,Japanese,Korean # Optional. To use RAGFlow's cross-language search, please separate each language with a single comma

# RAG_PROVIDER=dify
# DIFY_API_URL="https://api.dify.ai/v1"
# DIFY_API_KEY="dataset-xxx"

# MOI is a hybrid database that mainly serves enterprise users (https://www.matrixorigin.io/matrixone-intelligence)
# RAG_PROVIDER=moi
# MOI_API_URL="https://cluster.matrixonecloud.cn"
# MOI_API_KEY="xxx-xxx-xxx-xxx"
# MOI_RETRIEVAL_SIZE=10
# MOI_LIST_LIMIT=10


# RAG_PROVIDER: milvus (using free milvus instance on zilliz cloud: https://docs.zilliz.com/docs/quick-start )
# RAG_PROVIDER=milvus
# MILVUS_URI=<endpoint_of_self_hosted_milvus_or_zilliz_cloud>
# MILVUS_USER=<username_of_self_hosted_milvus_or_zilliz_cloud>
# MILVUS_PASSWORD=<password_of_self_hosted_milvus_or_zilliz_cloud>
# MILVUS_COLLECTION=documents
# MILVUS_EMBEDDING_PROVIDER=openai # support openai,dashscope
# MILVUS_EMBEDDING_BASE_URL=
# MILVUS_EMBEDDING_MODEL=
# MILVUS_EMBEDDING_API_KEY=
# MILVUS_AUTO_LOAD_EXAMPLES=true

# RAG_PROVIDER: milvus (using milvus lite on Mac or Linux)
# RAG_PROVIDER=milvus
# MILVUS_URI=./milvus_demo.db
# MILVUS_COLLECTION=documents
# MILVUS_EMBEDDING_PROVIDER=openai # support openai,dashscope
# MILVUS_EMBEDDING_BASE_URL=
# MILVUS_EMBEDDING_MODEL=
# MILVUS_EMBEDDING_API_KEY=
# MILVUS_AUTO_LOAD_EXAMPLES=true

# Optional, volcengine TTS for generating podcast
VOLCENGINE_TTS_APPID=xxx
VOLCENGINE_TTS_ACCESS_TOKEN=xxx
# VOLCENGINE_TTS_CLUSTER=volcano_tts # Optional, default is volcano_tts
# VOLCENGINE_TTS_VOICE_TYPE=BV700_V2_streaming # Optional, default is BV700_V2_streaming

# Option, for langsmith tracing and monitoring
# LANGSMITH_TRACING=true
# LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
# LANGSMITH_API_KEY="xxx"
# LANGSMITH_PROJECT="xxx"

# [!NOTE]
# For model settings and other configurations, please refer to `docs/configuration_guide.md`

# Option, for langgraph mongodb checkpointer
# Enable LangGraph checkpoint saver, supports MongoDB, Postgres
#LANGGRAPH_CHECKPOINT_SAVER=true
# Set the database URL for saving checkpoints
#LANGGRAPH_CHECKPOINT_DB_URL=mongodb://localhost:27017/
#LANGGRAPH_CHECKPOINT_DB_URL=postgresql://localhost:5432/postgres
deployment/docker/deer-flow/conf.yaml.example (new file)
@@ -0,0 +1,71 @@
# [!NOTE]
# Read the `docs/configuration_guide.md` carefully, and update the
# configurations to match your specific settings and requirements.
# - Replace `api_key` with your own credentials.
# - Replace `base_url` and `model` name if you want to use a custom model.
# - Set `verify_ssl` to `false` if your LLM server uses self-signed certificates
# - A restart is required every time you change the `conf.yaml` file.

BASIC_MODEL:
  base_url: https://ark.cn-beijing.volces.com/api/v3
  model: "doubao-1-5-pro-32k-250115"
  api_key: xxxx
  # max_retries: 3 # Maximum number of retries for LLM calls
  # verify_ssl: false # Uncomment this line to disable SSL certificate verification for self-signed certificates

# Local model configuration example:

# Ollama (Tested and supported for local development)
# BASIC_MODEL:
#   base_url: "http://localhost:11434/v1" # Ollama OpenAI compatible endpoint
#   model: "qwen3:14b" # or "llama3.2", etc.
#   api_key: "ollama" # Ollama doesn't need real API key
#   max_retries: 3
#   verify_ssl: false # Local deployment usually doesn't need SSL verification

# To use Google Ai Studio as your basic platform:
# BASIC_MODEL:
#   platform: "google_aistudio"
#   model: "gemini-2.5-flash" # or "gemini-1.5-pro", "gemini-2.5-flash-exp", etc.
#   api_key: your_gemini_api_key # Get from https://aistudio.google.com/app/apikey
#   max_retries: 3

# Reasoning model is optional.
# Uncomment the following settings if you want to use reasoning model
# for planning.

# REASONING_MODEL:
#   base_url: https://ark.cn-beijing.volces.com/api/v3
#   model: "doubao-1-5-thinking-pro-m-250428"
#   api_key: xxxx
#   max_retries: 3 # Maximum number of retries for LLM calls


# OTHER SETTINGS:
# Search engine configuration (Only supports Tavily currently)
# SEARCH_ENGINE:
#   engine: tavily
#   # Only include results from these domains
#   include_domains:
#     - example.com
#     - trusted-news.com
#     - reliable-source.org
#     - gov.cn
#     - edu.cn
#   # Exclude results from these domains
#   exclude_domains:
#     - example.com
#   # Include an answer in the search results
#   include_answer: false
#   # Search depth: "basic" or "advanced"
#   search_depth: "advanced"
#   # Include raw content from pages
#   include_raw_content: true
#   # Include images in search results
#   include_images: true
#   # Include descriptions for images
#   include_image_descriptions: true
#   # Minimum score threshold for results (0-1)
#   min_score_threshold: 0.0
#   # Maximum content length per page
#   max_content_length_per_page: 4000
deployment/docker/deer-flow/docker-compose.yml (new file)
@@ -0,0 +1,28 @@
services:
  deer-flow-backend:
    image: deer-flow-backend
    container_name: deer-flow-backend
    env_file:
      - .env
    volumes:
      - ./conf.yaml:/app/conf.yaml:ro
    restart: unless-stopped
    networks:
      - datamate

  deer-flow-frontend:
    image: deer-flow-frontend
    container_name: deer-flow-frontend
    env_file:
      - .env
    depends_on:
      - deer-flow-backend
    restart: unless-stopped
    networks:
      - datamate

networks:
  datamate:
    driver: bridge
    name: datamate_datamate
    external: true
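Because the deer-flow services join datamate_datamate as an external network, the DataMate compose project has to be running (or the network created some other way) before these containers start; a pre-flight check might look like:

docker network inspect datamate_datamate > /dev/null 2>&1 || echo "datamate network missing; start the DataMate stack first"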
@@ -40,7 +40,7 @@ export default function WelcomePage() {
             开始使用
           </span>
           <span
-            onClick={() => navigate("/agent")}
+            onClick={() => navigate("/chat")}
             className="cursor-pointer rounded px-4 py-2 inline-flex items-center bg-gradient-to-r from-purple-600 to-pink-600 hover:from-purple-700 hover:to-pink-700 text-white shadow-lg"
           >
             <MessageSquare className="mr-2 w-4 h-4" />
@@ -232,7 +232,7 @@ export default function WelcomePage() {

         <div className="text-center">
           <span
-            onClick={() => navigate("/agent")}
+            onClick={() => navigate("/chat")}
             className="cursor-pointer rounded px-4 py-2 inline-flex items-center bg-gradient-to-r from-purple-600 to-pink-600 hover:from-purple-700 hover:to-pink-700 text-white shadow-lg"
           >
             <MessageSquare className="mr-2 w-4 h-4" />
@@ -53,7 +53,7 @@ const router = createBrowserRouter([
     Component: withErrorBoundary(Home),
   },
   {
-    path: "/agent",
+    path: "/chat",
     Component: withErrorBoundary(AgentPage),
   },
   {
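The rename from /agent to /chat only touches the spots shown above; if any other component still links to the old path, a repo-wide search will surface it (the frontend/src prefix is a guess at the layout, adjust as needed):

grep -rn '"/agent"' frontend/src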
@@ -10,7 +10,7 @@ class Settings(BaseSettings):
        env_file = ".env"
        case_sensitive = False
        extra = 'ignore'  # 允许额外字段(如 Shell 脚本专用的环境变量)

    # =========================
    # Adapter 服务配置
    # =========================
@@ -18,7 +18,7 @@ class Settings(BaseSettings):
    app_version: str = "1.0.0"
    app_description: str = "Adapter for integrating Data Management System with Label Studio"
    debug: bool = True

    # 服务器配置
    host: str = "0.0.0.0"
    port: int = 8000
@@ -34,27 +34,27 @@ class Settings(BaseSettings):
    mysql_user: Optional[str] = None
    mysql_password: Optional[str] = None
    mysql_database: Optional[str] = None

    # PostgreSQL数据库配置 (优先级2)
    postgres_host: Optional[str] = None
    postgres_port: int = 5432
    postgres_user: Optional[str] = None
    postgres_password: Optional[str] = None
    postgres_database: Optional[str] = None

    # SQLite数据库配置 (优先级3 - 兜底)
    sqlite_path: str = "data/labelstudio_adapter.db"

    # 直接数据库URL配置(如果提供,将覆盖上述配置)
    database_url: Optional[str] = None

    # 日志配置
    log_level: str = "INFO"

    # 安全配置
    secret_key: str = "your-secret-key-change-this-in-production"
    access_token_expire_minutes: int = 30

    # =========================
    # Label Studio 服务配置
    # =========================
@@ -74,7 +74,7 @@ class Settings(BaseSettings):
    # Data Management 服务配置
    # =========================
    dm_file_path_prefix: str = "/"  # DM存储文件夹前缀

    @property
    def computed_database_url(self) -> str:
@@ -85,61 +85,61 @@ class Settings(BaseSettings):
        # 如果直接提供了database_url,优先使用
        if self.database_url:
            return self.database_url

        # 优先级1: MySQL
        if all([self.mysql_host, self.mysql_user, self.mysql_password, self.mysql_database]):
            return f"mysql+aiomysql://{self.mysql_user}:{self.mysql_password}@{self.mysql_host}:{self.mysql_port}/{self.mysql_database}"

        # 优先级2: PostgreSQL
        if all([self.postgres_host, self.postgres_user, self.postgres_password, self.postgres_database]):
            return f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}@{self.postgres_host}:{self.postgres_port}/{self.postgres_database}"

        # 优先级3: SQLite (兜底)
        sqlite_full_path = Path(self.sqlite_path).absolute()
        # 确保目录存在
        sqlite_full_path.parent.mkdir(parents=True, exist_ok=True)
        return f"sqlite+aiosqlite:///{sqlite_full_path}"

    @property
    def sync_database_url(self) -> str:
        """
        用于数据库迁移的同步连接URL
        将异步驱动替换为同步驱动
        """
        async_url = self.computed_database_url

        # 替换异步驱动为同步驱动
        sync_replacements = {
            "mysql+aiomysql://": "mysql+pymysql://",
            "postgresql+asyncpg://": "postgresql+psycopg2://",
            "sqlite+aiosqlite:///": "sqlite:///"
        }

        for async_driver, sync_driver in sync_replacements.items():
            if async_url.startswith(async_driver):
                return async_url.replace(async_driver, sync_driver)

        return async_url

    def get_database_info(self) -> dict:
        """获取数据库配置信息"""
        url = self.computed_database_url

        if url.startswith("mysql"):
            db_type = "MySQL"
        elif url.startswith("postgresql"):
            db_type = "PostgreSQL"
        elif url.startswith("sqlite"):
            db_type = "SQLite"
        else:
            db_type = "Unknown"

        return {
            "type": db_type,
            "url": url,
            "sync_url": self.sync_database_url
        }


# 全局设置实例
settings = Settings()
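Since BaseSettings reads environment variables case-insensitively, the MySQL > PostgreSQL > SQLite priority in computed_database_url can be exercised from a shell without touching the code; the import path below is a guess at the adapter's package layout, not something this diff defines:

# hypothetical module path - adjust to wherever Settings actually lives
MYSQL_HOST=db MYSQL_USER=root MYSQL_PASSWORD=password MYSQL_DATABASE=adapter \
  python -c "from config import settings; print(settings.get_database_info())"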
@@ -1,8 +1,6 @@
 FROM maven:3-openjdk-8-slim AS datax-builder

-RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list && \
-    sed -i 's/security.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list && \
-    apt-get update && \
+RUN apt-get update && \
     apt-get install -y git && \
     git clone https://github.com/alibaba/DataX.git
@@ -24,8 +22,7 @@ RUN cd /opt/backend && \

 FROM openjdk:21-jdk-slim

-RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list.d/debian.sources && \
-    apt-get update && \
+RUN apt-get update && \
     apt-get install -y vim wget curl nfs-common rsync python3 python3-pip python-is-python3 dos2unix && \
     apt-get clean && \
     rm -rf /var/lib/apy/lists/*
@@ -41,6 +38,8 @@ RUN dos2unix /opt/backend/start.sh \
     && chmod +x /opt/backend/start.sh \
     && ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime

 EXPOSE 8080

+ENTRYPOINT ["/opt/backend/start.sh"]
+
 CMD ["java", "-Duser.timezone=Asia/Shanghai", "-jar", "/opt/backend/data-mate.jar"]
@@ -10,8 +10,10 @@ RUN if [ -f package-lock.json ]; then npm ci; else npm install; fi && \
 FROM nginx:1.29 AS runner

 COPY --from=builder /app/dist /opt/frontend
-COPY scripts/images/frontend/backend.conf /etc/nginx/conf.d/default.conf

-RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
+RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
+    && rm -f /etc/nginx/conf.d/default.conf

 EXPOSE 80

 CMD ["nginx", "-g", "daemon off;"]
@@ -5,8 +5,7 @@ COPY runtime/ops /opt/runtime/datamate/ops

 ENV PYTHONPATH=/opt/runtime/datamate/

-RUN sed -i 's/deb.debian.org/mirrors.huaweicloud.com/g' /etc/apt/sources.list.d/debian.sources \
-    && apt update \
+RUN apt update \
     && apt install -y libgl1 libglib2.0-0 vim poppler-utils tesseract-ocr tesseract-ocr-chi-sim libmagic1t64 libreoffice\
     && apt clean \
     && rm -rf /var/lib/apt/lists/*
@@ -15,8 +14,10 @@ WORKDIR /opt/runtime

 ENV HF_HUB_DISABLE_XET=1

-RUN pip install -e . -i https://mirrors.huaweicloud.com/repository/pypi/simple \
-    && pip install -r /opt/runtime/datamate/ops/requirements.txt -i https://mirrors.huaweicloud.com/repository/pypi/simple \
+RUN pip install -e . \
+    && pip install -r /opt/runtime/datamate/ops/requirements.txt \
     && pip cache purge

+RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
+
 EXPOSE 8081