
Commit b012550

rm redundant vars, rfk start.sh, add network mode
1 parent bebeb97 commit b012550

3 files changed: 68 additions, 40 deletions


.env.example

Lines changed: 0 additions & 2 deletions
@@ -9,8 +9,6 @@ DKN_ADMIN_PUBLIC_KEY=0208ef5e65a9c656a6f92fb2c770d5d5e2ecffe02a6aade19207f75110b
 DKN_MODELS=phi3:3.8b
 
 ## DRIA (optional) ##
-# info | debug | error | none,dkn_compute=debug
-DKN_LOG_LEVEL=info
 # P2P address, you don't need to change this unless you really want this port.
 DKN_P2P_LISTEN_ADDR=/ip4/0.0.0.0/tcp/4001
 # Comma-separated static relay nodes
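With DKN_LOG_LEVEL removed from .env.example, log verbosity is controlled through RUST_LOG alone (see the compose.yml default below and the --trace flag in start.sh). As a sketch, an operator wanting more verbose output could add a line like the following to their .env, reusing the levels listed in the removed comment (info | debug | error); omitting it falls back to the compose default of none,dkn_compute=info:

# optional .env override (sketch; not part of this commit)
RUST_LOG=none,dkn_compute=debug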

compose.yml

Lines changed: 4 additions & 2 deletions
@@ -7,7 +7,7 @@ services:
       DKN_WALLET_SECRET_KEY: ${DKN_WALLET_SECRET_KEY}
       DKN_ADMIN_PUBLIC_KEY: ${DKN_ADMIN_PUBLIC_KEY}
       DKN_MODELS: ${DKN_MODELS}
-      RUST_LOG: ${RUST_LOG}
+      RUST_LOG: ${RUST_LOG-none,dkn_compute=info}
       DKN_P2P_LISTEN_ADDR: ${DKN_P2P_LISTEN_ADDR}
       DKN_RELAY_NODES: ${DKN_RELAY_NODES}
       DKN_BOOTSTRAP_NODES: ${DKN_BOOTSTRAP_NODES}
@@ -17,10 +17,12 @@ services:
       OLLAMA_HOST: ${OLLAMA_HOST}
       OLLAMA_PORT: ${OLLAMA_PORT}
       OLLAMA_AUTO_PULL: ${OLLAMA_AUTO_PULL:-true}
-    network_mode: "host"
+    network_mode: ${DKN_DOCKER_NETWORK_MODE:-bridge}
     extra_hosts:
       # for Linux, we need to add this line manually
       - "host.docker.internal:host-gateway"
+    ports:
+      - 4001:4001
     restart: "on-failure"
 
   # Ollama Container (CPU)
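A note on the Compose variable substitution used above: ${RUST_LOG-none,dkn_compute=info} falls back to the default only when RUST_LOG is unset (the - form), while ${DKN_DOCKER_NETWORK_MODE:-bridge} and ${OLLAMA_AUTO_PULL:-true} also fall back when the variable is set but empty (the :- form). The new ports entry publishes tcp/4001, matching DKN_P2P_LISTEN_ADDR in .env.example, presumably to keep the P2P listener reachable now that the default network mode is bridge rather than host. A minimal sketch of how the overrides would be supplied, assuming values come from .env or the shell environment (not part of this commit):

# .env sketch
DKN_DOCKER_NETWORK_MODE=host   # e.g. Linux with a local Ollama; leave unset to keep the bridge default
OLLAMA_AUTO_PULL=              # empty still resolves to true, because :- also covers the empty case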

start.sh

Lines changed: 64 additions & 36 deletions
@@ -36,18 +36,21 @@ check_docker_compose() {
 }
 check_docker_compose
 
+# check the operating system
+# this is required in case local Ollama is used
+# reference: https://stackoverflow.com/a/68706298
 OS=""
 check_os() {
     unameOut=$(uname -a)
     case "${unameOut}" in
-        *Microsoft*) OS="WSL";; #must be first since Windows subsystem for linux will have Linux in the name too
-        *microsoft*) OS="WSL2";; #WARNING: My v2 uses ubuntu 20.4 at the moment slightly different name may not always work
-        Linux*) OS="Linux";;
-        Darwin*) OS="Mac";;
-        CYGWIN*) OS="Cygwin";;
-        MINGW*) OS="Windows";;
-        *Msys) OS="Windows";;
-        *) OS="UNKNOWN:${unameOut}"
+        *Microsoft*) OS="WSL";; # must be first since WSL will have Linux in the name too
+        *microsoft*) OS="WSL2";; #WARNING: My v2 uses Ubuntu 20.4 at the moment slightly different name may not always work
+        Linux*) OS="Linux";;
+        Darwin*) OS="Mac";;
+        CYGWIN*) OS="Cygwin";;
+        MINGW*) OS="Windows";;
+        *Msys) OS="Windows";;
+        *) OS="UNKNOWN:${unameOut}"
     esac
 }
 check_os
@@ -71,6 +74,11 @@ MODELS_LIST=""
 LOCAL_OLLAMA_PID=""
 DOCKER_HOST="http://host.docker.internal"
 
+# this is the default network mode, but
+# based on local Ollama & OS we may set it to `host`
+# https://docs.docker.com/engine/network/#drivers
+DKN_DOCKER_NETWORK_MODE=bridge
+
 # handle command line arguments
 while [ "$#" -gt 0 ]; do
     case $1 in
@@ -90,13 +98,18 @@ while [ "$#" -gt 0 ]; do
         --trace)
             RUST_LOG="none,dkn_compute=trace"
             ;;
+
         -b|--background) START_MODE="BACKGROUND" ;;
+
         -h|--help) docs ;;
+
         *) echo "ERROR: Unknown parameter passed: $1"; exit 1 ;;
     esac
     shift
 done
 
+# check required environment variables
+# we only need the secret key & admin public key
 check_required_env_vars() {
     required_vars="
         DKN_WALLET_SECRET_KEY
@@ -163,6 +176,7 @@ handle_ollama_env() {
         OLLAMA_PORT
         OLLAMA_AUTO_PULL
     "
+    # loads env variables (TODO: !)
    as_pairs "$ollama_env_vars" > /dev/null 2>&1
 
     # if there is no ollama model given, do not add any ollama compose profile
@@ -178,7 +192,10 @@ handle_ollama_env() {
         return
     fi
 
-    # check local ollama
+    # check local ollama first
+    # if it can be found, try launching it & configure network to be able to connect to localhost
+    # if not, use the docker ollama image
+    # if the user explicitly wants to use the docker ollama image, this condition skips the local checks
     if [ "$DOCKER_OLLAMA" = false ]; then
         if command -v ollama >/dev/null 2>&1; then
             # host machine has ollama installed
@@ -203,10 +220,11 @@ handle_ollama_env() {
                 curl -s -o /dev/null -w "%{http_code}" ${ollama_url}
             }
 
+            # check if ollama is already running
             if [ "$(check_ollama_server)" -eq 200 ]; then
                 echo "Local Ollama is already up at $ollama_url and running, using it"
-                # Using already running local Ollama
             else
+                # ollama is not live, so we launch it ourselves
                 echo "Local Ollama is not live, running ollama serve"
 
                 # `ollama serve` uses `OLLAMA_HOST` variable with both host and port,
@@ -228,51 +246,60 @@ handle_ollama_env() {
                     RETRY_COUNT=$((RETRY_COUNT + 1))
                 done
 
+                # exit with error if we couldnt launch Ollama
                 if [ "$RETRY_COUNT" -ge "$MAX_RETRIES" ]; then
                     echo "Local Ollama server failed to start after $MAX_RETRIES attempts."
                     echo "You can use the --docker-ollama flag to use the Docker Ollama image instead."
                     exit 1
                 else
                     LOCAL_OLLAMA_PID=$temp_pid
                     echo "Local Ollama server is up at $ollama_url and running with PID $LOCAL_OLLAMA_PID"
-                    # Using local ollama
                 fi
             fi
-            # Depending on the host os, use localhost or host.docker.internal for Ollama host
-            if [ "$OS" = "Mac" ]; then
-                OLLAMA_HOST="http://host.docker.internal"
-            elif [ "$OS" = "Linux" ]; then
+
+            # to use the local Ollama, we need to configure the network depending on the Host
+            # Windows and Mac should work with host.docker.internal alright,
+            # but Linux requires `host` network mode with `localhost` as the Host URL
+            if [ "$OS" = "Linux" ]; then
                 OLLAMA_HOST="http://localhost"
+                DKN_DOCKER_NETWORK_MODE=host
+            else
+                OLLAMA_HOST="http://host.docker.internal"
             fi
-            return
         else
+            # although --docker-ollama was not passed, we checked and couldnt find Ollama
+            # so we will use Docker anyways
+            echo "Ollama is not installed on this machine, will use Docker Ollama service"
             DOCKER_OLLAMA=true
-            echo "Ollama is not installed on this machine, using the Docker ollama instead"
         fi
     fi
 
-    # check for cuda gpu
-    if command -v nvidia-smi >/dev/null 2>&1; then
-        if nvidia-smi >/dev/null 2>&1; then
-            echo "GPU type detected: CUDA"
-            COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-cuda"
-            return
-        fi
-    fi
+    # this is in a separate if condition rather than `else`, due to a fallback condition above
+    if [ "$DOCKER_OLLAMA" = true ]; then
+        # check for cuda gpu
+        if command -v nvidia-smi >/dev/null 2>&1; then
+            if nvidia-smi >/dev/null 2>&1; then
+                echo "GPU type detected: CUDA"
+                COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-cuda"
+            fi
+        # check for rocm gpu
+        elif command -v rocminfo >/dev/null 2>&1; then
+            if rocminfo >/dev/null 2>&1; then
+                echo "GPU type detected: ROCM"
+                COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-rocm"
+            fi
+        # otherwise, fallback to cpu
+        else
+            echo "No GPU detected, using CPU"
+            COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-cpu"
+        fi
 
-    # check for rocm gpu
-    if command -v rocminfo >/dev/null 2>&1; then
-        if rocminfo >/dev/null 2>&1; then
-            echo "GPU type detected: ROCM"
-            COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-rocm"
-            return
-        fi
+        # use docker internal for the Ollama host
+        OLLAMA_HOST=$DOCKER_HOST
+        DKN_DOCKER_NETWORK_MODE=bridge
     fi
 
-    # if there are no local ollama and gpu, use docker-compose with cpu profile
-    echo "No GPU found, using ollama-cpu"
-    COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-cpu"
-    OLLAMA_HOST=$DOCKER_HOST
+    echo "Ollama host: $OLLAMA_HOST (network mode: $DKN_DOCKER_NETWORK_MODE)"
 }
 handle_ollama_env
 
@@ -298,6 +325,7 @@ echo ""
 echo "Starting in ${START_MODE} mode..."
 echo "Log level: ${RUST_LOG}"
 echo "Models: ${DKN_MODELS}"
+echo "Operating System: ${OS}"
 echo "${COMPOSE_PROFILES}"
 echo ""
 eval "${COMPOSE_UP}"
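Taken together, the start.sh changes make the Ollama host and Docker network mode explicit: with a local Ollama on Linux the script sets OLLAMA_HOST=http://localhost and DKN_DOCKER_NETWORK_MODE=host; with a local Ollama on other systems it uses http://host.docker.internal with the bridge default; and with Docker Ollama it uses $DOCKER_HOST plus one of the ollama-cuda / ollama-rocm / ollama-cpu compose profiles. A usage sketch based only on the flags visible in this diff (--trace, -b/--background, -h/--help, and the --docker-ollama flag mentioned in the error message; the exact handling of the latter sits outside the shown hunks):

./start.sh --docker-ollama --trace   # force the Docker Ollama image, trace-level logs
./start.sh -b                        # start the node in the background with defaults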
