-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose.yml
More file actions
65 lines (62 loc) · 1.94 KB
/
docker-compose.yml
File metadata and controls
65 lines (62 loc) · 1.94 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
# Compose file for the Tree Evaluator stack: FastAPI backend, Streamlit UI,
# and an optional in-Docker Ollama service (profile-gated).
# NOTE: the top-level "version" attribute is obsolete in the Compose
# Specification and only produced a warning under Compose v2, so it is omitted.
services:
  # Optional: run Ollama inside Docker (disabled by default).
  # Enable with: docker compose --profile with-ollama up
  ollama:
    profiles: ["with-ollama"]
    image: ollama/ollama:latest
    container_name: tree-evaluator-ollama
    ports:
      - "11434:11434"
    volumes:
      # Named volume so pulled models persist across container restarts.
      - ollama:/root/.ollama
    healthcheck:
      # "ollama list" only succeeds once the server is up and answering.
      test: ["CMD", "ollama", "list"]
      interval: 10s
      timeout: 5s
      retries: 10

  # FastAPI backend served by uvicorn with hot-reload for development.
  api:
    build: .
    container_name: tree-evaluator-api
    environment:
      - APP_ENV=development
      - APP_NAME=Tree Evaluator API
      - APP_VERSION=0.1.0
    ports:
      - "8000:8000"
    volumes:
      # Read-only bind mount: --reload picks up host edits, while the
      # container cannot write back into the source tree.
      - ./app:/app/app:ro
    command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload

  # Streamlit UI; uses OpenAI or Ollama depending on LLM_PROVIDER.
  streamlit:
    build:
      context: .
      dockerfile: Dockerfile.streamlit
    container_name: tree-evaluator-streamlit
    ports:
      - "8501:8501"
    volumes:
      - ./streamlit_app:/app/streamlit_app:ro
      - ./app:/app/app:ro
      - ./dataset:/app/dataset:ro
      # Named volume so the chat index database survives rebuilds.
      - chat_data:/app/data
    environment:
      - PYTHONPATH=/app
      - CHAT_DB_PATH=/app/data/chat_index.db
      # LLM provider selection: openai | ollama
      - LLM_PROVIDER=${LLM_PROVIDER:-openai}
      # Ollama settings (used when LLM_PROVIDER=ollama).
      # Default points to host Ollama (Docker Desktop): http://host.docker.internal:11434
      # If you enable the ollama service profile, set OLLAMA_BASE_URL=http://ollama:11434
      - OLLAMA_BASE_URL=${OLLAMA_BASE_URL:-http://host.docker.internal:11434}
      - OLLAMA_CHAT_MODEL=${OLLAMA_CHAT_MODEL:-qwen2.5:7b-instruct}
      - OLLAMA_EMBEDDING_MODEL=${OLLAMA_EMBEDDING_MODEL:-nomic-embed-text}
      # OpenAI API key (optional; it can also be entered from the UI)
      # - OPENAI_API_KEY=${OPENAI_API_KEY}
    env_file:
      # NOTE(review): compose refuses to start if .env is missing; keep an
      # (even empty) .env in the project root, or on Compose >= 2.24 switch
      # to the long form with "required: false".
      - .env
    command: streamlit run streamlit_app/app.py --server.port 8501 --server.address 0.0.0.0

volumes:
  chat_data:
    driver: local
  ollama:
    driver: local