Skip to content

Commit 1194c43

Browse files
committed
Add docker-compose examples including remote Ollama setup
- docker-compose.yml: Basic setup with local Ollama - docker-compose.remote-ollama.example.yml: Shows how to use extra_hosts for remote Ollama servers (fixes network isolation on Docker Desktop)
1 parent 13655e8 commit 1194c43

File tree

2 files changed

+56
-0
lines changed

2 files changed

+56
-0
lines changed
Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
---
# Example: Docker Compose with a remote Ollama server.
#
# When the Ollama server runs on another machine in your local network,
# 'extra_hosts' lets the container resolve that machine's hostname.
#
# Steps:
#   1. Replace 'ollama-server.local' with your server's hostname or IP
#   2. Replace '192.168.1.100' with your Ollama server's IP address
#   3. Adjust the remaining environment variables as needed

version: '3.8'

services:
  translate-book:
    image: ghcr.io/hydropix/translatebookswithllms:latest
    ports:
      - "5000:5000"
    environment:
      - API_ENDPOINT=http://ollama-server.local:11434/api/generate
      - DEFAULT_MODEL=translategemma:27b-it-q8_0
      - LLM_PROVIDER=ollama
      - PORT=5000
      - HOST=0.0.0.0
      - REQUEST_TIMEOUT=9000
      - MAX_TRANSLATION_ATTEMPTS=2500
      - MAIN_CHUNK_SIZE=1000
      - AUTO_ADJUST_CONTEXT=true
      - OLLAMA_NUM_CTX=131072
      - DEFAULT_TARGET_LANGUAGE=Italian
      - SIGNATURE_ENABLED=false
      - DEBUG_MODE=true
    extra_hosts:
      # Replace with your Ollama server's hostname and IP
      - "ollama-server.local:192.168.1.100"
    volumes:
      - ./translated_files:/app/translated_files
      - ./logs:/app/logs

docker-compose.yml

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
---
# Basic setup: Ollama running on the Docker host machine.
#
# NOTE(review): inside a container, 'localhost' refers to the container
# itself, not the host — so an endpoint of http://localhost:11434 can
# never reach an Ollama server on the host. Use 'host.docker.internal'
# instead; it is built in on Docker Desktop, and the 'host-gateway'
# entry under 'extra_hosts' below makes it work on Linux engines too.

version: '3.8'

services:
  translate-book:
    image: ghcr.io/hydropix/translatebookswithllms:latest
    ports:
      - "5000:5000"
    environment:
      # Fixed: was http://localhost:11434/... (unreachable from the container)
      - API_ENDPOINT=http://host.docker.internal:11434/api/generate
      - DEFAULT_MODEL=qwen3:14b
      - LLM_PROVIDER=ollama
      - PORT=5000
      - HOST=0.0.0.0
      - REQUEST_TIMEOUT=900
      # Intentionally empty — presumably the target language is chosen at
      # runtime; TODO confirm against the application's defaults
      - DEFAULT_TARGET_LANGUAGE=
      - DEBUG_MODE=false
    extra_hosts:
      # Resolves host.docker.internal to the host gateway on Linux
      - "host.docker.internal:host-gateway"
    volumes:
      - ./translated_files:/app/translated_files
      - ./logs:/app/logs

0 commit comments

Comments (0)