-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path anythingllm_docker_compose_2.yml
More file actions
41 lines (39 loc) · 1.54 KB
/
anythingllm_docker_compose_2.yml
File metadata and controls
41 lines (39 loc) · 1.54 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
version: "3.9"  # NOTE: `version` is obsolete in the Compose Specification and ignored by Compose v2+
services:
  anythingllm:
    image: mintplexlabs/anythingllm:latest  # consider pinning a specific tag for reproducible deploys
    container_name: anythingllm
    ports:
      - "3001:3001"  # quoted — unquoted port mappings can hit YAML's sexagesimal-number trap
    cap_add:
      - SYS_ADMIN  # NOTE(review): broad capability — confirm it is actually still needed
    environment:
      # All values quoted: Compose env vars are strings, and quoting prevents
      # YAML implicit typing from converting them to ints/floats/booleans.
      STORAGE_DIR: "/app/server/storage"  # storage path inside the container
      # SECURITY: placeholder — replace with a real random secret (>= 20 chars);
      # do not commit the real value to version control.
      JWT_SECRET: "20 character key"
      LLM_PROVIDER: "ollama"
      OLLAMA_BASE_PATH: "http://192.168.1.10:7869"  # external Ollama host; use the service name if Ollama joins this stack
      OLLAMA_MODEL_PREF: "deepseek-r1:14b"  # or your preferred model
      OLLAMA_MODEL_TOKEN_LIMIT: "4096"  # quoted so the container receives the string "4096", not an int
      EMBEDDING_ENGINE: "ollama"
      EMBEDDING_BASE_PATH: "http://192.168.1.10:7869"  # same Ollama endpoint as above
      EMBEDDING_MODEL_PREF: "nomic-embed-text:latest"  # or your preferred embedding model
      EMBEDDING_MODEL_MAX_CHUNK_LENGTH: "8192"  # quoted string for the same reason as the token limit
      VECTOR_DB: "lancedb"
      WHISPER_PROVIDER: "local"
      TTS_PROVIDER: "native"
      PASSWORDMINCHAR: "8"  # minimum password length; quoted so it stays a string
      DATABASE_URL: "file:/app/server/storage/anythingllm.db"  # SQLite DB inside the mounted storage volume
    volumes:
      - /home/jwilliams/anythingllm:/app/server/storage  # host storage location
      - /home/jwilliams/ollama-docker/ollama/ollama/models:/app/models  # host Ollama models
    restart: unless-stopped  # restart on failure/reboot, but not after an explicit stop

  # Uncomment to run Ollama as a separate service in this stack.
  # ollama:
  #   image: ollama/ollama:latest
  #   container_name: ollama
  #   ports:
  #     - "11434:11434"  # optional: expose the Ollama API
  #   volumes:
  #     - /storage/ollama/models:/models
  #   restart: unless-stopped