#!/usr/bin/env bash
set -euo pipefail
# One-click start script for web backend and optionally Ollama.
- # Usage: start.sh [--backend] [--ollama [MODEL]] [--all]
+ # Usage: start.sh [--backend] [--ollama [MODEL]] [--all] [--foreground]

ENV_NAME="lightllm"
ROOT_DIR="$(cd "$(dirname "$0")" && pwd)/.."
@@ -39,21 +39,41 @@ start_backend() {
  if command -v conda >/dev/null 2>&1; then
    # Try to get the python executable path inside the env
    PY_CMD="$(conda run -n "$ENV_NAME" --no-capture-output which python 2>/dev/null || true)"
-   if [ -z "$PY_CMD" ]; then
-     # Fallback to conda run python -m
-     echo "Using 'conda run -n $ENV_NAME python -m uvicorn' fallback"
-     (PYTHONPATH="$ROOT_DIR" conda run -n "$ENV_NAME" --no-capture-output bash -lc 'nohup python -m uvicorn web.backend:app --host 127.0.0.1 --port 8000 --reload > "$UVICORN_LOG" 2>&1 &')
+   if [ -z "$PY_CMD" ]; then
+     # Fallback to conda run python -m
+     echo "Using 'conda run -n $ENV_NAME python -m uvicorn' fallback"
+     if [ "$FOREGROUND" -eq 1 ]; then
+       echo "Running uvicorn in foreground (conda run)..."
+       PYTHONPATH="$ROOT_DIR" conda run -n "$ENV_NAME" --no-capture-output bash -lc 'python -m uvicorn web.backend:app --host 127.0.0.1 --port 8000 --reload'
+     else
+       (PYTHONPATH="$ROOT_DIR" conda run -n "$ENV_NAME" --no-capture-output bash -lc 'nohup python -m uvicorn web.backend:app --host 127.0.0.1 --port 8000 --reload > "$UVICORN_LOG" 2>&1 &')
+     fi
    else
      echo "Using python: $PY_CMD"
+     if [ "$FOREGROUND" -eq 1 ]; then
+       echo "Running uvicorn in foreground using $PY_CMD"
+       PYTHONPATH="$ROOT_DIR" exec "$PY_CMD" -m uvicorn web.backend:app --host 127.0.0.1 --port 8000
+     else
        (PYTHONPATH="$ROOT_DIR" nohup "$PY_CMD" -m uvicorn web.backend:app --host 127.0.0.1 --port 8000 > "$UVICORN_LOG" 2>&1 &)
+     fi
    fi
  else
    # No conda: use system python3 if available
    if command -v python3 >/dev/null 2>&1; then
+     if [ "$FOREGROUND" -eq 1 ]; then
+       echo "Running uvicorn in foreground using system python3"
+       PYTHONPATH="$ROOT_DIR" exec python3 -m uvicorn web.backend:app --host 127.0.0.1 --port 8000
+     else
        (PYTHONPATH="$ROOT_DIR" nohup python3 -m uvicorn web.backend:app --host 127.0.0.1 --port 8000 > "$UVICORN_LOG" 2>&1 &)
+     fi
    else
      # Fallback: try uvicorn directly
+     if [ "$FOREGROUND" -eq 1 ]; then
+       echo "Running uvicorn in foreground (uvicorn CLI)"
+       PYTHONPATH="$ROOT_DIR" exec uvicorn web.backend:app --host 127.0.0.1 --port 8000
+     else
        (PYTHONPATH="$ROOT_DIR" nohup uvicorn web.backend:app --host 127.0.0.1 --port 8000 > "$UVICORN_LOG" 2>&1 &)
+     fi
    fi
  fi

@@ -74,9 +94,14 @@ start_ollama() {
    echo "Pulling model: $model (this may take time)" | tee -a "$OLLAMA_LOG"
    ollama pull "$model" >> "$OLLAMA_LOG" 2>&1 || true
  fi
-  nohup ollama serve > "$OLLAMA_LOG" 2>&1 &
-  sleep 2
-  echo "Ollama served (log: $OLLAMA_LOG)"
+  if [ "$FOREGROUND" -eq 1 ]; then
+    echo "Running ollama serve in foreground (logs to stdout/stderr)"
+    exec ollama serve
+  else
+    nohup ollama serve > "$OLLAMA_LOG" 2>&1 &
+    sleep 2
+    echo "Ollama served (log: $OLLAMA_LOG)"
+  fi
}

print_usage() {
@@ -99,6 +124,7 @@
MODE_BACKEND=0
MODE_OLLAMA=0
OLLAMA_MODEL=""
+ FOREGROUND=0

while [ "$#" -gt 0 ]; do
  case "$1" in
@@ -110,6 +136,8 @@ while [ "$#" -gt 0 ]; do
      MODE_BACKEND=1; MODE_OLLAMA=1; shift ;;
    --help|-h)
      print_usage; exit 0 ;;
+   --foreground|--fg)
+     FOREGROUND=1; shift ;;
    *)
      echo "Unknown arg: $1"; print_usage; exit 1 ;;
  esac
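
Example invocations, as a sketch (the --backend, --ollama, --all, and --foreground/--fg flags come from the usage line and parser above; the relative script path is an assumption, and how --ollama picks up its optional MODEL argument is handled outside the lines shown here):

  # background mode (default): servers are detached with nohup, output goes to $UVICORN_LOG / $OLLAMA_LOG
  ./start.sh --backend
  ./start.sh --all

  # foreground mode: the chosen server stays attached to the terminal (exec in most branches); stop with Ctrl-C
  ./start.sh --backend --foreground
  ./start.sh --ollama --fg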