@@ -36,18 +36,21 @@ check_docker_compose() {
}
check_docker_compose

+# check the operating system
+# this is required in case local Ollama is used
+# reference: https://stackoverflow.com/a/68706298
OS=""
check_os() {
    unameOut=$(uname -a)
    case "${unameOut}" in
-        *Microsoft*) OS="WSL" ;; # must be first since Windows Subsystem for Linux will have Linux in the name too
-        *microsoft*) OS="WSL2" ;; # WARNING: My v2 uses ubuntu 20.4 at the moment slightly different name may not always work
-        Linux*) OS="Linux" ;;
-        Darwin*) OS="Mac" ;;
-        CYGWIN*) OS="Cygwin" ;;
-        MINGW*) OS="Windows" ;;
-        *Msys) OS="Windows" ;;
-        *) OS="UNKNOWN:${unameOut}"
+        *Microsoft*) OS="WSL" ;; # must be first since WSL will have Linux in the name too
+        *microsoft*) OS="WSL2" ;; # WARNING: tested on WSL2 with Ubuntu 20.04; a slightly different kernel name may not match
+        Linux*) OS="Linux" ;;
+        Darwin*) OS="Mac" ;;
+        CYGWIN*) OS="Cygwin" ;;
+        MINGW*) OS="Windows" ;;
+        *Msys) OS="Windows" ;;
+        *) OS="UNKNOWN:${unameOut}"
    esac
}
check_os
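
Both WSL arms must precede the Linux* arm, since `uname -a` under WSL also contains "Linux". For a concrete feel of what each arm matches, here are illustrative outputs (not from this PR; kernel version strings vary per machine):

    # macOS:  Darwin mbp.local 23.5.0 Darwin Kernel Version 23.5.0 ... arm64
    # WSL2:   Linux host 5.15.153.1-microsoft-standard-WSL2 ... x86_64 GNU/Linux
    # so Darwin* yields OS="Mac" and *microsoft* yields OS="WSL2"
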
@@ -71,6 +74,11 @@ MODELS_LIST=""
LOCAL_OLLAMA_PID=""
DOCKER_HOST="http://host.docker.internal"

+# this is the default network mode, but
+# based on local Ollama & OS we may set it to `host`
+# https://docs.docker.com/engine/network/#drivers
+DKN_DOCKER_NETWORK_MODE=bridge
+
# handle command line arguments
while [ "$#" -gt 0 ]; do
    case $1 in
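
For context on what the two modes mean in practice, here is a minimal `docker run` sketch (the image name is hypothetical; in this project the variable is consumed by compose, not by docker run directly):

    # bridge (default): the container gets its own network namespace;
    # the host is reachable as host.docker.internal (on Linux, via a gateway alias)
    docker run --network bridge --add-host host.docker.internal:host-gateway some/image

    # host: the container shares the host's network stack, so localhost is the host itself
    docker run --network host some/image
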
@@ -90,13 +98,18 @@ while [ "$#" -gt 0 ]; do
    --trace)
        RUST_LOG="none,dkn_compute=trace"
        ;;
+
    -b|--background) START_MODE="BACKGROUND" ;;
+
    -h|--help) docs ;;
+
    *) echo "ERROR: Unknown parameter passed: $1"; exit 1 ;;
    esac
    shift
done

+# check required environment variables
+# we only need the secret key & admin public key
check_required_env_vars() {
    required_vars="
        DKN_WALLET_SECRET_KEY
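
Putting the flag handling together, a typical invocation could look like this (assuming the script is saved as start.sh; the admin key's exact variable name below is an assumption, since the required_vars list is cut off by the hunk):

    # start with trace-level logs, detached into the background
    ./start.sh --trace -b

    # .env (sketch): the two keys the comment above refers to, with placeholder values
    DKN_WALLET_SECRET_KEY=<hex-secret-key>
    DKN_ADMIN_PUBLIC_KEY=<hex-public-key>
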
@@ -163,6 +176,7 @@ handle_ollama_env() {
        OLLAMA_PORT
        OLLAMA_AUTO_PULL
    "
+    # loads env variables (TODO: !)
    as_pairs "$ollama_env_vars" > /dev/null 2>&1

    # if there is no ollama model given, do not add any ollama compose profile
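
These map onto Ollama's own server settings; the Ollama server listens on 127.0.0.1:11434 by default, so a plausible set of values, following this script's convention of keeping the scheme+host and the port in separate variables, would be (illustrative values; OLLAMA_AUTO_PULL is this project's flag, not Ollama's):

    OLLAMA_HOST=http://127.0.0.1
    OLLAMA_PORT=11434
    OLLAMA_AUTO_PULL=true
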
@@ -178,7 +192,10 @@ handle_ollama_env() {
        return
    fi

-    # check local ollama
+    # check local ollama first
+    # if it can be found, try launching it & configure the network so we can connect to it on localhost
+    # if not, use the Docker Ollama image
+    # if the user explicitly wants the Docker Ollama image, this condition skips the local checks
    if [ "$DOCKER_OLLAMA" = false ]; then
        if command -v ollama > /dev/null 2>&1; then
            # host machine has ollama installed
@@ -203,10 +220,11 @@ handle_ollama_env() {
                curl -s -o /dev/null -w "%{http_code}" ${ollama_url}
            }

+            # check if ollama is already running
            if [ "$(check_ollama_server)" -eq 200 ]; then
                echo "Local Ollama is already up at $ollama_url and running, using it"
-                # Using already running local Ollama
            else
+                # ollama is not live, so we launch it ourselves
                echo "Local Ollama is not live, running ollama serve"

                # `ollama serve` uses `OLLAMA_HOST` variable with both host and port,
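
The probe above can be reproduced by hand: a healthy Ollama server answers 200 on its root path (11434 is Ollama's default port, assumed here since $ollama_url is assembled elsewhere in the script):

    $ curl -s -o /dev/null -w "%{http_code}" http://localhost:11434
    200
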
@@ -228,51 +246,60 @@ handle_ollama_env() {
                    RETRY_COUNT=$((RETRY_COUNT + 1))
                done

+                # exit with error if we couldn't launch Ollama
                if [ "$RETRY_COUNT" -ge "$MAX_RETRIES" ]; then
                    echo "Local Ollama server failed to start after $MAX_RETRIES attempts."
                    echo "You can use the --docker-ollama flag to use the Docker Ollama image instead."
                    exit 1
                else
                    LOCAL_OLLAMA_PID=$temp_pid
                    echo "Local Ollama server is up at $ollama_url and running with PID $LOCAL_OLLAMA_PID"
-                    # Using local ollama
                fi
            fi
-            # Depending on the host os, use localhost or host.docker.internal for Ollama host
-            if [ "$OS" = "Mac" ]; then
-                OLLAMA_HOST="http://host.docker.internal"
-            elif [ "$OS" = "Linux" ]; then
+
+            # to use the local Ollama, we need to configure the network depending on the host OS
+            # Windows and Mac should work with host.docker.internal alright,
+            # but Linux requires `host` network mode with `localhost` as the host URL
+            if [ "$OS" = "Linux" ]; then
                OLLAMA_HOST="http://localhost"
+                DKN_DOCKER_NETWORK_MODE=host
+            else
+                OLLAMA_HOST="http://host.docker.internal"
            fi
-            return
        else
+            # although --docker-ollama was not passed, we checked and couldn't find Ollama
+            # so we will use Docker anyway
+            echo "Ollama is not installed on this machine, will use Docker Ollama service"
            DOCKER_OLLAMA=true
-            echo "Ollama is not installed on this machine, using the Docker ollama instead"
        fi
    fi

-    # check for cuda gpu
-    if command -v nvidia-smi > /dev/null 2>&1; then
-        if nvidia-smi > /dev/null 2>&1; then
-            echo "GPU type detected: CUDA"
-            COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-cuda"
-            return
-        fi
-    fi
+    # this is in a separate if condition rather than `else`, due to the fallback condition above
+    if [ "$DOCKER_OLLAMA" = true ]; then
+        # check for cuda gpu
+        if command -v nvidia-smi > /dev/null 2>&1; then
+            if nvidia-smi > /dev/null 2>&1; then
+                echo "GPU type detected: CUDA"
+                COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-cuda"
+            fi
+        # check for rocm gpu
+        elif command -v rocminfo > /dev/null 2>&1; then
+            if rocminfo > /dev/null 2>&1; then
+                echo "GPU type detected: ROCM"
+                COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-rocm"
+            fi
+        # otherwise, fall back to cpu
+        else
+            echo "No GPU detected, using CPU"
+            COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-cpu"
+        fi

-    # check for rocm gpu
-    if command -v rocminfo > /dev/null 2>&1; then
-        if rocminfo > /dev/null 2>&1; then
-            echo "GPU type detected: ROCM"
-            COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-rocm"
-            return
-        fi
+        # use docker internal for the Ollama host
+        OLLAMA_HOST=$DOCKER_HOST
+        DKN_DOCKER_NETWORK_MODE=bridge
    fi

-    # if there are no local ollama and gpu, use docker-compose with cpu profile
-    echo "No GPU found, using ollama-cpu"
-    COMPOSE_PROFILES="$COMPOSE_PROFILES ollama-cpu"
-    OLLAMA_HOST=$DOCKER_HOST
+    echo "Ollama host: $OLLAMA_HOST (network mode: $DKN_DOCKER_NETWORK_MODE)"
}
handle_ollama_env
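
Since the GPU branch only appends compose profiles, its effect is equivalent to exporting COMPOSE_PROFILES (Docker Compose's standard profile selector) before bringing the stack up; on a CUDA machine, for example, this amounts to (sketch):

    COMPOSE_PROFILES="ollama-cuda" docker compose up -d
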
@@ -298,6 +325,7 @@ echo ""
echo "Starting in ${START_MODE} mode..."
echo "Log level: ${RUST_LOG}"
echo "Models: ${DKN_MODELS}"
+echo "Operating System: ${OS}"
echo "${COMPOSE_PROFILES}"
echo ""
eval "${COMPOSE_UP}"
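
COMPOSE_UP itself is assembled earlier in the script and is not part of this diff; purely as a hypothetical shape, the final eval amounts to something like:

    # hypothetical; the real COMPOSE_UP is defined outside these hunks
    COMPOSE_UP="COMPOSE_PROFILES=\"$COMPOSE_PROFILES\" docker compose up -d"
    eval "${COMPOSE_UP}"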