feature: integrate deer-flow
deployment/docker/datamate/.env.deer-flow.example (new file, +1)
@@ -0,0 +1 @@
NGINX_CONF=./backend-with-deer-flow.conf
deployment/docker/datamate/.env.example (new file, +1)
@@ -0,0 +1 @@
NGINX_CONF=./backend.conf
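These two env files only choose which nginx config the datamate frontend container mounts; the variable is consumed by the $NGINX_CONF volume entry in the compose hunks further below. A minimal usage sketch, assuming the stack is run from deployment/docker/datamate and that Docker Compose reads ./.env as usual (the copy-to-.env workflow is an assumption, not part of this commit):

    cd deployment/docker/datamate
    cp .env.deer-flow.example .env    # deer-flow-enabled routing
    # cp .env.example .env            # or: plain DataMate routing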
deployment/docker/datamate/backend-with-deer-flow.conf (new file, +49)
@@ -0,0 +1,49 @@
server {
    listen 80;
    server_name 0.0.0.0;

    access_log /var/log/datamate/frontend/access.log main;
    error_log /var/log/datamate/frontend/error.log notice;

    client_max_body_size 1024M;

    add_header Set-Cookie "NEXT_LOCALE=zh";

    location /api/ {
        proxy_pass http://datamate-backend:8080/api/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    location /chat {
        proxy_pass http://deer-flow-frontend:3000/chat;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    location /_next {
        proxy_pass http://deer-flow-frontend:3000/_next;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    location /deer-flow-backend/ {
        proxy_pass http://deer-flow-backend:8000/api/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    location / {
        if ($query_string ~* "_rsc=pmmii") {
            proxy_pass http://deer-flow-frontend:3000;
            break;
        }

        root /opt/frontend;
        try_files $uri $uri/ /index.html;
    }
}
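Once both stacks are running, the routing above can be spot-checked from the host. A hedged sketch (port 30000 comes from the datamate compose mapping below; these requests only confirm that nginx forwards traffic, not any particular endpoint behavior):

    curl -I http://localhost:30000/api/                 # proxied to datamate-backend:8080
    curl -I http://localhost:30000/chat                 # proxied to deer-flow-frontend:3000
    curl -I http://localhost:30000/deer-flow-backend/   # proxied to deer-flow-backend:8000/api/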
deployment/docker/datamate/backend.conf (new file, +21)
@@ -0,0 +1,21 @@
server {
    listen 80;
    server_name 0.0.0.0;

    access_log /var/log/datamate/frontend/access.log main;
    error_log /var/log/datamate/frontend/error.log notice;

    client_max_body_size 1024M;

    location /api/ {
        proxy_pass http://datamate-backend:8080/api/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }

    location / {
        root /opt/frontend;
        try_files $uri $uri/ /index.html;
    }
}
@@ -5,8 +5,6 @@ services:
     image: datamate-backend
     restart: on-failure
     privileged: true
-    ports:
-      - "8080"
     volumes:
       - dataset_volume:/dataset
       - flow_volume:/flow
@@ -24,6 +22,7 @@ services:
       - "30000:80" # nodePort → hostPort
     volumes:
       - frontend_log_volume:/var/log/datamate/frontend
+      - $NGINX_CONF:/etc/nginx/conf.d/backend.conf
     networks: [ datamate ]
     depends_on:
       - datamate-backend
@@ -35,8 +34,6 @@ services:
     restart: on-failure
     environment:
       MYSQL_ROOT_PASSWORD: password
-    ports:
-      - "3306"
     command: |
       sh -c "
       chown mysql:mysql /var/log/datamate/database &&
@@ -63,8 +60,6 @@ services:
       MYSQL_USER: "root"
       MYSQL_PASSWORD: "password"
       MYSQL_DATABASE: "datamate"
-    ports:
-      - "8081"
     command:
       - python
       - /opt/runtime/datamate/operator_runtime.py
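The functional addition in these hunks is the $NGINX_CONF bind mount on the frontend service of the datamate stack's compose file. A quick way to confirm the variable resolves to the intended config before starting anything (a sketch, assuming the .env chosen earlier is in place):

    cd deployment/docker/datamate
    docker compose config | grep "\.conf"   # rendered source path should be the file selected via NGINX_CONF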
deployment/docker/deer-flow/.env.example (new file, +99)
@@ -0,0 +1,99 @@
# Application Settings
DEBUG=True
APP_ENV=development

# docker build args
NEXT_PUBLIC_API_URL="http://localhost:30000/deer-flow-backend"

AGENT_RECURSION_LIMIT=30

# CORS settings
# Comma-separated list of allowed origins for CORS requests
# Example: ALLOWED_ORIGINS=http://localhost:3000,http://example.com
ALLOWED_ORIGINS=http://localhost:3000

# Enable or disable MCP server configuration; the default is false.
# Please enable this feature only after securing your front-end and back-end in a managed environment.
# Otherwise, your system could be compromised.
ENABLE_MCP_SERVER_CONFIGURATION=true

# Enable or disable PYTHON_REPL configuration; the default is false.
# Please enable this feature only after securing your deployment in a managed environment.
# Otherwise, your system could be compromised.
ENABLE_PYTHON_REPL=false

# Search Engine. Supported values: tavily (recommended), duckduckgo, brave_search, arxiv, searx
SEARCH_API=tavily
TAVILY_API_KEY=tvly-xxx
# SEARX_HOST=xxx # Required only if SEARCH_API is searx (compatible with both Searx and SearxNG)
# BRAVE_SEARCH_API_KEY=xxx # Required only if SEARCH_API is brave_search
# JINA_API_KEY=jina_xxx # Optional, default is None

# Optional, RAG provider
# RAG_PROVIDER=vikingdb_knowledge_base
# VIKINGDB_KNOWLEDGE_BASE_API_URL="api-knowledgebase.mlp.cn-beijing.volces.com"
# VIKINGDB_KNOWLEDGE_BASE_API_AK="AKxxx"
# VIKINGDB_KNOWLEDGE_BASE_API_SK=""
# VIKINGDB_KNOWLEDGE_BASE_RETRIEVAL_SIZE=15

# RAG_PROVIDER=ragflow
# RAGFLOW_API_URL="http://localhost:9388"
# RAGFLOW_API_KEY="ragflow-xxx"
# RAGFLOW_RETRIEVAL_SIZE=10
# RAGFLOW_CROSS_LANGUAGES=English,Chinese,Spanish,French,German,Japanese,Korean # Optional. To use RAGFlow's cross-language search, separate the languages with single commas

# RAG_PROVIDER=dify
# DIFY_API_URL="https://api.dify.ai/v1"
# DIFY_API_KEY="dataset-xxx"

# MOI is a hybrid database that mainly serves enterprise users (https://www.matrixorigin.io/matrixone-intelligence)
# RAG_PROVIDER=moi
# MOI_API_URL="https://cluster.matrixonecloud.cn"
# MOI_API_KEY="xxx-xxx-xxx-xxx"
# MOI_RETRIEVAL_SIZE=10
# MOI_LIST_LIMIT=10

# RAG_PROVIDER: milvus (using a free milvus instance on Zilliz Cloud: https://docs.zilliz.com/docs/quick-start)
# RAG_PROVIDER=milvus
# MILVUS_URI=<endpoint_of_self_hosted_milvus_or_zilliz_cloud>
# MILVUS_USER=<username_of_self_hosted_milvus_or_zilliz_cloud>
# MILVUS_PASSWORD=<password_of_self_hosted_milvus_or_zilliz_cloud>
# MILVUS_COLLECTION=documents
# MILVUS_EMBEDDING_PROVIDER=openai # supports openai, dashscope
# MILVUS_EMBEDDING_BASE_URL=
# MILVUS_EMBEDDING_MODEL=
# MILVUS_EMBEDDING_API_KEY=
# MILVUS_AUTO_LOAD_EXAMPLES=true

# RAG_PROVIDER: milvus (using Milvus Lite on Mac or Linux)
# RAG_PROVIDER=milvus
# MILVUS_URI=./milvus_demo.db
# MILVUS_COLLECTION=documents
# MILVUS_EMBEDDING_PROVIDER=openai # supports openai, dashscope
# MILVUS_EMBEDDING_BASE_URL=
# MILVUS_EMBEDDING_MODEL=
# MILVUS_EMBEDDING_API_KEY=
# MILVUS_AUTO_LOAD_EXAMPLES=true

# Optional, volcengine TTS for generating podcasts
VOLCENGINE_TTS_APPID=xxx
VOLCENGINE_TTS_ACCESS_TOKEN=xxx
# VOLCENGINE_TTS_CLUSTER=volcano_tts # Optional, default is volcano_tts
# VOLCENGINE_TTS_VOICE_TYPE=BV700_V2_streaming # Optional, default is BV700_V2_streaming

# Optional, for LangSmith tracing and monitoring
# LANGSMITH_TRACING=true
# LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
# LANGSMITH_API_KEY="xxx"
# LANGSMITH_PROJECT="xxx"

# [!NOTE]
# For model settings and other configurations, please refer to `docs/configuration_guide.md`

# Optional, for the LangGraph MongoDB checkpointer
# Enable the LangGraph checkpoint saver; supports MongoDB and Postgres
#LANGGRAPH_CHECKPOINT_SAVER=true
# Set the database URL for saving checkpoints
#LANGGRAPH_CHECKPOINT_DB_URL=mongodb://localhost:27017/
#LANGGRAPH_CHECKPOINT_DB_URL=postgresql://localhost:5432/postgres
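NEXT_PUBLIC_API_URL is listed under "docker build args", so it has to be baked into the deer-flow-frontend image at build time rather than injected when the container starts. A hedged sketch of such a build, assuming deer-flow's frontend Dockerfile lives in a web/ subdirectory of its checkout (the path and image tag are assumptions, not part of this commit):

    docker build \
      --build-arg NEXT_PUBLIC_API_URL=http://localhost:30000/deer-flow-backend \
      -t deer-flow-frontend \
      ./web    # assumed location of deer-flow's frontend Dockerfile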
deployment/docker/deer-flow/conf.yaml.example (new file, +71)
@@ -0,0 +1,71 @@
# [!NOTE]
# Read the `docs/configuration_guide.md` carefully, and update the
# configurations to match your specific settings and requirements.
# - Replace `api_key` with your own credentials.
# - Replace `base_url` and `model` name if you want to use a custom model.
# - Set `verify_ssl` to `false` if your LLM server uses self-signed certificates.
# - A restart is required every time you change the `conf.yaml` file.

BASIC_MODEL:
  base_url: https://ark.cn-beijing.volces.com/api/v3
  model: "doubao-1-5-pro-32k-250115"
  api_key: xxxx
  # max_retries: 3 # Maximum number of retries for LLM calls
  # verify_ssl: false # Uncomment this line to disable SSL certificate verification for self-signed certificates

# Local model configuration example:

# Ollama (tested and supported for local development)
# BASIC_MODEL:
#   base_url: "http://localhost:11434/v1" # Ollama's OpenAI-compatible endpoint
#   model: "qwen3:14b" # or "llama3.2", etc.
#   api_key: "ollama" # Ollama doesn't need a real API key
#   max_retries: 3
#   verify_ssl: false # Local deployments usually don't need SSL verification

# To use Google AI Studio as your basic platform:
# BASIC_MODEL:
#   platform: "google_aistudio"
#   model: "gemini-2.5-flash" # or "gemini-1.5-pro", "gemini-2.5-flash-exp", etc.
#   api_key: your_gemini_api_key # Get one from https://aistudio.google.com/app/apikey
#   max_retries: 3

# The reasoning model is optional.
# Uncomment the following settings if you want to use a reasoning model
# for planning.

# REASONING_MODEL:
#   base_url: https://ark.cn-beijing.volces.com/api/v3
#   model: "doubao-1-5-thinking-pro-m-250428"
#   api_key: xxxx
#   max_retries: 3 # Maximum number of retries for LLM calls


# OTHER SETTINGS:
# Search engine configuration (only Tavily is supported currently)
# SEARCH_ENGINE:
#   engine: tavily
#   # Only include results from these domains
#   include_domains:
#     - example.com
#     - trusted-news.com
#     - reliable-source.org
#     - gov.cn
#     - edu.cn
#   # Exclude results from these domains
#   exclude_domains:
#     - example.com
#   # Include an answer in the search results
#   include_answer: false
#   # Search depth: "basic" or "advanced"
#   search_depth: "advanced"
#   # Include raw content from pages
#   include_raw_content: true
#   # Include images in search results
#   include_images: true
#   # Include descriptions for images
#   include_image_descriptions: true
#   # Minimum score threshold for results (0-1)
#   min_score_threshold: 0.0
#   # Maximum content length per page
#   max_content_length_per_page: 4000
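The note at the top of this file says a restart is required whenever conf.yaml changes, and the compose file below mounts it read-only into the backend container, so restarting that one service is enough. A minimal sketch, assuming the deer-flow compose project directory:

    cd deployment/docker/deer-flow
    docker compose restart deer-flow-backend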
deployment/docker/deer-flow/docker-compose.yml (new file, +28)
@@ -0,0 +1,28 @@
services:
  deer-flow-backend:
    image: deer-flow-backend
    container_name: deer-flow-backend
    env_file:
      - .env
    volumes:
      - ./conf.yaml:/app/conf.yaml:ro
    restart: unless-stopped
    networks:
      - datamate

  deer-flow-frontend:
    image: deer-flow-frontend
    container_name: deer-flow-frontend
    env_file:
      - .env
    depends_on:
      - deer-flow-backend
    restart: unless-stopped
    networks:
      - datamate

networks:
  datamate:
    driver: bridge
    name: datamate_datamate
    external: true
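Because the datamate network is declared external here (named datamate_datamate), the datamate stack has to be up first so the shared bridge network exists before deer-flow attaches to it. A sketch of the assumed bring-up order:

    cd deployment/docker/datamate && docker compose up -d    # creates the datamate_datamate network
    docker network ls | grep datamate_datamate               # confirm the shared network exists
    cd ../deer-flow && docker compose up -d                  # joins the existing network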