@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:0.14.2
+    image: langgenius/dify-api:1.0.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -12,6 +12,10 @@ services:
       SENTRY_DSN: ${API_SENTRY_DSN:-}
       SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
       SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0}
+      PLUGIN_REMOTE_INSTALL_HOST: ${EXPOSE_PLUGIN_DEBUGGING_HOST:-localhost}
+      PLUGIN_REMOTE_INSTALL_PORT: ${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}
+      PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
+      INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
     depends_on:
       - db
       - redis
@@ -25,7 +29,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:0.14.2
+    image: langgenius/dify-api:1.0.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -35,6 +39,8 @@ services:
       SENTRY_DSN: ${API_SENTRY_DSN:-}
       SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
       SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0}
+      PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
+      INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
     depends_on:
       - db
       - redis
@@ -47,7 +53,7 @@ services:
 
   # Frontend web application.
   web:
-    image: langgenius/dify-web:0.14.2
+    image: langgenius/dify-web:1.0.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -56,6 +62,14 @@ services:
       NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
       TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
       CSP_WHITELIST: ${CSP_WHITELIST:-}
+      MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai}
+      MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai}
+      TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-}
+      INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-}
+      PM2_INSTANCES: ${PM2_INSTANCES:-2}
+      LOOP_NODE_MAX_COUNT: ${LOOP_NODE_MAX_COUNT:-100}
+      MAX_TOOLS_NUM: ${MAX_TOOLS_NUM:-10}
+      MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
 
   # The postgres database.
   db:
@@ -75,7 +89,7 @@ services:
     volumes:
       - ./volumes/db/data:/var/lib/postgresql/data
     healthcheck:
-      test: ['CMD', 'pg_isready']
+      test: [ 'CMD', 'pg_isready' ]
       interval: 1s
       timeout: 3s
       retries: 30
@@ -92,7 +106,7 @@ services:
     # Set the redis password when startup redis server.
     command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456}
     healthcheck:
-      test: ['CMD', 'redis-cli', 'ping']
+      test: [ 'CMD', 'redis-cli', 'ping' ]
 
   # The DifySandbox
   sandbox:
@@ -111,11 +125,40 @@ services:
       SANDBOX_PORT: ${SANDBOX_PORT:-8194}
     volumes:
       - ./volumes/sandbox/dependencies:/dependencies
+      - ./volumes/sandbox/conf:/conf
     healthcheck:
-      test: ['CMD', 'curl', '-f', 'http://localhost:8194/health']
+      test: [ 'CMD', 'curl', '-f', 'http://localhost:8194/health' ]
     networks:
       - ssrf_proxy_network
 
+  # plugin daemon
+  plugin_daemon:
+    image: langgenius/dify-plugin-daemon:0.0.4-local
+    restart: always
+    environment:
+      # Use the shared environment variables.
+      <<: *shared-api-worker-env
+      DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
+      SERVER_PORT: ${PLUGIN_DAEMON_PORT:-5002}
+      SERVER_KEY: ${PLUGIN_DAEMON_KEY:-lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi}
+      MAX_PLUGIN_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
+      PPROF_ENABLED: ${PLUGIN_PPROF_ENABLED:-false}
+      DIFY_INNER_API_URL: ${PLUGIN_DIFY_INNER_API_URL:-http://api:5001}
+      DIFY_INNER_API_KEY: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
+      PLUGIN_REMOTE_INSTALLING_HOST: ${PLUGIN_DEBUGGING_HOST:-0.0.0.0}
+      PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003}
+      PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd}
+      FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true}
+      PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
+      PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600}
+      PIP_MIRROR_URL: ${PIP_MIRROR_URL:-}
+    ports:
+      - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
+    volumes:
+      - ./volumes/plugin_daemon:/app/storage
+    depends_on:
+      - db
+
   # ssrf_proxy server
   # for more information, please refer to
   # https://docs.dify.ai/learn-more/faq/install-faq#id-18.-why-is-ssrf_proxy-needed
@@ -125,12 +168,7 @@ services:
     volumes:
       - ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template
       - ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh
-    entrypoint:
-      [
-        'sh',
-        '-c',
-        "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh",
-      ]
+    entrypoint: [ 'sh', '-c', "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ]
     environment:
       # pls clearly modify the squid env vars to fit your network environment.
       HTTP_PORT: ${SSRF_HTTP_PORT:-3128}
@@ -159,8 +197,8 @@ services:
       - CERTBOT_EMAIL=${CERTBOT_EMAIL}
       - CERTBOT_DOMAIN=${CERTBOT_DOMAIN}
       - CERTBOT_OPTIONS=${CERTBOT_OPTIONS:-}
-    entrypoint: ['/docker-entrypoint.sh']
-    command: ['tail', '-f', '/dev/null']
+    entrypoint: [ '/docker-entrypoint.sh' ]
+    command: [ 'tail', '-f', '/dev/null' ]
 
   # The nginx reverse proxy.
   # used for reverse proxying the API service and Web service.
@@ -177,12 +215,7 @@ services:
       - ./volumes/certbot/conf/live:/etc/letsencrypt/live # cert dir (with certbot container)
       - ./volumes/certbot/conf:/etc/letsencrypt
       - ./volumes/certbot/www:/var/www/html
-    entrypoint:
-      [
-        'sh',
-        '-c',
-        "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh",
-      ]
+    entrypoint: [ 'sh', '-c', "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ]
     environment:
       NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_}
       NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false}
@@ -207,16 +240,6 @@ services:
       - '${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}'
       - '${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}'
 
-  # The TiDB vector store.
-  # For production use, please refer to https://github.com/pingcap/tidb-docker-compose
-  tidb:
-    image: pingcap/tidb:v8.4.0
-    profiles:
-      - tidb
-    command:
-      - --store=unistore
-    restart: always
-
   # The Weaviate vector store.
   weaviate:
     image: semitechnologies/weaviate:1.19.0
@@ -274,7 +297,7 @@ services:
     working_dir: /opt/couchbase
     stdin_open: true
     tty: true
-    entrypoint: [""]
+    entrypoint: [ "" ]
     command: sh -c "/opt/couchbase/init/init-cbserver.sh"
     volumes:
       - ./volumes/couchbase/data:/opt/couchbase/var/lib/couchbase/data
@@ -300,10 +323,15 @@ services:
       POSTGRES_DB: ${PGVECTOR_POSTGRES_DB:-dify}
       # postgres data directory
       PGDATA: ${PGVECTOR_PGDATA:-/var/lib/postgresql/data/pgdata}
+      # pg_bigm module for full text search
+      PG_BIGM: ${PGVECTOR_PG_BIGM:-false}
+      PG_BIGM_VERSION: ${PGVECTOR_PG_BIGM_VERSION:-1.2-20240606}
     volumes:
       - ./volumes/pgvector/data:/var/lib/postgresql/data
+      - ./pgvector/docker-entrypoint.sh:/docker-entrypoint.sh
+    entrypoint: [ '/docker-entrypoint.sh' ]
     healthcheck:
-      test: ['CMD', 'pg_isready']
+      test: [ 'CMD', 'pg_isready' ]
       interval: 1s
       timeout: 3s
       retries: 30
@@ -325,7 +353,7 @@ services:
     volumes:
       - ./volumes/pgvecto_rs/data:/var/lib/postgresql/data
     healthcheck:
-      test: ['CMD', 'pg_isready']
+      test: [ 'CMD', 'pg_isready' ]
       interval: 1s
       timeout: 3s
       retries: 30
@@ -390,7 +418,7 @@ services:
       - ./volumes/milvus/etcd:/etcd
     command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd
     healthcheck:
-      test: ['CMD', 'etcdctl', 'endpoint', 'health']
+      test: [ 'CMD', 'etcdctl', 'endpoint', 'health' ]
       interval: 30s
       timeout: 20s
       retries: 3
@@ -409,7 +437,7 @@ services:
       - ./volumes/milvus/minio:/minio_data
     command: minio server /minio_data --console-address ":9001"
     healthcheck:
-      test: ['CMD', 'curl', '-f', 'http://localhost:9000/minio/health/live']
+      test: [ 'CMD', 'curl', '-f', 'http://localhost:9000/minio/health/live' ]
       interval: 30s
       timeout: 20s
       retries: 3
@@ -418,18 +446,18 @@ services:
 
   milvus-standalone:
     container_name: milvus-standalone
-    image: milvusdb/milvus:v2.3.1
+    image: milvusdb/milvus:v2.5.0-beta
    profiles:
      - milvus
-    command: ['milvus', 'run', 'standalone']
+    command: [ 'milvus', 'run', 'standalone' ]
     environment:
       ETCD_ENDPOINTS: ${ETCD_ENDPOINTS:-etcd:2379}
       MINIO_ADDRESS: ${MINIO_ADDRESS:-minio:9000}
       common.security.authorizationEnabled: ${MILVUS_AUTHORIZATION_ENABLED:-true}
     volumes:
       - ./volumes/milvus/milvus:/var/lib/milvus
     healthcheck:
-      test: ['CMD', 'curl', '-f', 'http://localhost:9091/healthz']
+      test: [ 'CMD', 'curl', '-f', 'http://localhost:9091/healthz' ]
       interval: 30s
       start_period: 90s
       timeout: 20s
@@ -480,6 +508,28 @@ services:
     depends_on:
       - opensearch
 
+  # opengauss vector database.
+  opengauss:
+    image: opengauss/opengauss:7.0.0-RC1
+    profiles:
+      - opengauss
+    privileged: true
+    restart: always
+    environment:
+      GS_USERNAME: ${OPENGAUSS_USER:-postgres}
+      GS_PASSWORD: ${OPENGAUSS_PASSWORD:-Dify@123}
+      GS_PORT: ${OPENGAUSS_PORT:-6600}
+      GS_DB: ${OPENGAUSS_DATABASE:-dify}
+    volumes:
+      - ./volumes/opengauss/data:/var/lib/opengauss/data
+    healthcheck:
+      test: ["CMD-SHELL", "netstat -lntp | grep tcp6 > /dev/null 2>&1"]
+      interval: 10s
+      timeout: 10s
+      retries: 10
+    ports:
+      - ${OPENGAUSS_PORT:-6600}:${OPENGAUSS_PORT:-6600}
+
   # MyScale vector database
   myscale:
     container_name: myscale
@@ -502,22 +552,30 @@ services:
     container_name: elasticsearch
     profiles:
       - elasticsearch
+      - elasticsearch-ja
     restart: always
     volumes:
+      - ./elasticsearch/docker-entrypoint.sh:/docker-entrypoint-mount.sh
       - dify_es01_data:/usr/share/elasticsearch/data
     environment:
       ELASTIC_PASSWORD: ${ELASTICSEARCH_PASSWORD:-elastic}
+      VECTOR_STORE: ${VECTOR_STORE:-}
       cluster.name: dify-es-cluster
       node.name: dify-es0
       discovery.type: single-node
-      xpack.license.self_generated.type: trial
+      xpack.license.self_generated.type: basic
       xpack.security.enabled: 'true'
       xpack.security.enrollment.enabled: 'false'
       xpack.security.http.ssl.enabled: 'false'
     ports:
       - ${ELASTICSEARCH_PORT:-9200}:9200
+    deploy:
+      resources:
+        limits:
+          memory: 2g
+    entrypoint: [ 'sh', '-c', "sh /docker-entrypoint-mount.sh" ]
     healthcheck:
-      test: ['CMD', 'curl', '-s', 'http://localhost:9200/_cluster/health?pretty']
+      test: [ 'CMD', 'curl', '-s', 'http://localhost:9200/_cluster/health?pretty' ]
       interval: 30s
       timeout: 10s
       retries: 50
@@ -545,7 +603,7 @@ services:
     ports:
       - ${KIBANA_PORT:-5601}:5601
     healthcheck:
-      test: ['CMD-SHELL', 'curl -s http://localhost:5601 >/dev/null || exit 1']
+      test: [ 'CMD-SHELL', 'curl -s http://localhost:5601 >/dev/null || exit 1' ]
       interval: 30s
       timeout: 10s
       retries: 3