Skip to content

Commit ca35365

Browse files
authored
Refactor: Ollama & Adding to Website (#4147)
* Refactor: Ollama & Adding to Website * VED -> VE * Update ollama.sh * Update ollama-install.sh * Update ollama.sh
1 parent 8e83943 commit ca35365

File tree

3 files changed

+106
-38
lines changed

3 files changed

+106
-38
lines changed

ct/ollama.sh

Lines changed: 38 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,36 +1,60 @@
11
#!/usr/bin/env bash
22
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
33
# Copyright (c) 2021-2025 tteck
4-
# Author: tteck | Co-Author: havardthom
4+
# Author: havardthom | Co-Author: MickLesk (CanbiZ)
55
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
66
# Source: https://ollama.com/
77

88
APP="Ollama"
99
var_tags="${var_tags:-ai}"
1010
var_cpu="${var_cpu:-4}"
1111
var_ram="${var_ram:-4096}"
12-
var_disk="${var_disk:-24}"
12+
var_disk="${var_disk:-35}"
1313
var_os="${var_os:-ubuntu}"
14-
var_version="${var_version:-22.04}"
14+
var_version="${var_version:-24.04}"
1515

1616
header_info "$APP"
1717
variables
1818
color
1919
catch_errors
2020

2121
# Update an existing in-container Ollama install to the latest GitHub release.
# Invoked by the framework when the script is re-run against an existing CT.
# Exits (never returns) on every path.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -d /usr/local/lib/ollama ]]; then
    msg_error "No Ollama Installation Found!"
    exit
  fi
  # Latest release tag (e.g. "v0.6.0") from the GitHub releases API.
  RELEASE=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk -F '"' '{print $4}')
  # Guard: an empty RELEASE (API rate-limited or unreachable) would otherwise
  # build a broken download URL and overwrite the stored version file.
  if [[ -z "${RELEASE}" ]]; then
    msg_error "Could not determine the latest Ollama release (GitHub API unreachable or rate-limited)"
    exit
  fi
  if [[ ! -f /opt/Ollama_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/Ollama_version.txt)" ]]; then
    if [[ ! -f /opt/Ollama_version.txt ]]; then
      touch /opt/Ollama_version.txt
    fi
    msg_info "Stopping Services"
    systemctl stop ollama
    msg_ok "Services Stopped"

    # Download the release tarball to a private temp file, then unpack it
    # over the existing install tree and refresh the /usr/local/bin symlink.
    TMP_TAR=$(mktemp --suffix=.tgz)
    curl -fL# -o "${TMP_TAR}" "https://github.com/ollama/ollama/releases/download/${RELEASE}/ollama-linux-amd64.tgz"
    msg_info "Updating Ollama to ${RELEASE}"
    tar -xzf "${TMP_TAR}" -C /usr/local/lib/ollama
    ln -sf /usr/local/lib/ollama/bin/ollama /usr/local/bin/ollama
    echo "${RELEASE}" >/opt/Ollama_version.txt
    msg_ok "Updated Ollama to ${RELEASE}"

    msg_info "Starting Services"
    systemctl start ollama
    msg_ok "Started Services"

    msg_info "Cleaning Up"
    rm -f "${TMP_TAR}"
    msg_ok "Cleaned"
    msg_ok "Updated Successfully"
  else
    msg_ok "No update required. Ollama is already at ${RELEASE}"
  fi
  exit
}
3559

3660
start
@@ -40,4 +64,4 @@ description
4064
msg_ok "Completed Successfully!\n"
4165
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
4266
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
43-
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:14434${CL}"
67+
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:11434${CL}"

frontend/public/json/ollama.json

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
{
2+
"name": "Ollama",
3+
"slug": "ollama",
4+
"categories": [
5+
20
6+
],
7+
"date_created": "2025-04-28",
8+
"type": "ct",
9+
"updateable": true,
10+
"privileged": false,
11+
"interface_port": 11434,
12+
"documentation": "https://github.com/ollama/ollama/tree/main/docs",
13+
"config_path": "/usr/local/lib/ollama",
14+
"website": "https://ollama.com/",
15+
"logo": "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/ollama.svg",
16+
"description": "Ollama is a tool that allows you to run large language models locally on your own computer. This means you can experiment with and use these AI models without needing an internet connection or relying on cloud-based services. It simplifies the process of managing and running these models, offering a way to keep your data private and potentially work faster. You can use Ollama to create local chatbots, conduct AI research, develop privacy-focused AI applications, and integrate AI into existing systems.",
17+
"install_methods": [
18+
{
19+
"type": "default",
20+
"script": "ct/ollama.sh",
21+
"resources": {
22+
"cpu": 4,
23+
"ram": 4096,
24+
"hdd": 35,
25+
"os": "Ubuntu",
26+
"version": "24.04"
27+
}
28+
}
29+
],
30+
"default_credentials": {
31+
"username": null,
32+
"password": null
33+
},
34+
"notes": []
35+
}

install/ollama-install.sh

Lines changed: 33 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,7 @@
11
#!/usr/bin/env bash
22

33
# Copyright (c) 2021-2025 tteck
4-
# Author: tteck
5-
# Co-Author: havardthom
4+
# Author: havardthom | Co-Author: MickLesk (CanbiZ)
65
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
76
# Source: https://ollama.com/
87

@@ -16,24 +15,10 @@ update_os
1615

1716
msg_info "Installing Dependencies"
1817
$STD apt-get install -y \
19-
gpg \
20-
git \
2118
build-essential \
22-
pkg-config \
23-
cmake
19+
pkg-config
2420
msg_ok "Installed Dependencies"
2521

26-
msg_info "Installing Golang"
27-
set +o pipefail
28-
temp_file=$(mktemp)
29-
golang_tarball=$(curl -fsSL https://go.dev/dl/ | grep -oP 'go[\d\.]+\.linux-amd64\.tar\.gz' | head -n 1)
30-
curl -fsSL "https://golang.org/dl/${golang_tarball}" -o "$temp_file"
31-
tar -C /usr/local -xzf "$temp_file"
32-
ln -sf /usr/local/go/bin/go /usr/local/bin/go
33-
rm -f "$temp_file"
34-
set -o pipefail
35-
msg_ok "Installed Golang"
36-
3722
msg_info "Setting up Intel® Repositories"
3823
mkdir -p /etc/apt/keyrings
3924
curl -fsSL https://repositories.intel.com/gpu/intel-graphics.key | gpg --dearmor -o /etc/apt/keyrings/intel-graphics.gpg
@@ -59,11 +44,35 @@ $STD apt-get install -y --no-install-recommends intel-basekit-2024.1
5944
msg_ok "Installed Intel® oneAPI Base Toolkit"
6045

6146
msg_info "Installing Ollama (Patience)"
# Latest release tag from the GitHub API; bail out early if it came back
# empty (rate limit / network failure) instead of downloading a broken URL.
RELEASE=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk -F '"' '{print $4}')
if [[ -z "${RELEASE}" ]]; then
  msg_error "Could not determine the latest Ollama release (GitHub API unreachable or rate-limited)"
  exit 1
fi
OLLAMA_INSTALL_DIR="/usr/local/lib/ollama"
BINDIR="/usr/local/bin"
mkdir -p "$OLLAMA_INSTALL_DIR"
OLLAMA_URL="https://github.com/ollama/ollama/releases/download/${RELEASE}/ollama-linux-amd64.tgz"
# Use mktemp instead of a fixed /tmp/ollama.tgz: a predictable temp path is
# vulnerable to pre-created/symlinked files and collides on re-runs.
TMP_TAR=$(mktemp --suffix=.tgz)
echo -e "\n"
if curl -fL# -o "$TMP_TAR" "$OLLAMA_URL"; then
  if tar -xzf "$TMP_TAR" -C "$OLLAMA_INSTALL_DIR"; then
    ln -sf "$OLLAMA_INSTALL_DIR/bin/ollama" "$BINDIR/ollama"
    # Record the installed version for the ct script's update check.
    echo "${RELEASE}" >/opt/Ollama_version.txt
    msg_ok "Installed Ollama ${RELEASE}"
  else
    msg_error "Extraction failed – archive corrupt or incomplete"
    exit 1
  fi
else
  msg_error "Download failed – $OLLAMA_URL not reachable"
  exit 1
fi
# Remove the downloaded tarball (previously leaked in /tmp).
rm -f "$TMP_TAR"
67+
68+
# Create a dedicated system account for the ollama service, give it access to
# GPU device nodes (render/video groups), and add the invoking user to the
# ollama group so they can talk to the daemon without sudo.
msg_info "Creating ollama User and Group"
if ! id ollama >/dev/null 2>&1; then
  useradd -r -s /usr/sbin/nologin -U -m -d /usr/share/ollama ollama
fi
# render/video groups may be absent on some images — best-effort on purpose.
$STD usermod -aG render ollama || true
$STD usermod -aG video ollama || true
# Quote the command substitution so an unusual username cannot word-split.
$STD usermod -aG ollama "$(id -u -n)"
msg_ok "Created ollama User and adjusted Groups"
6776

6877
msg_info "Creating Service"
6978
cat <<EOF >/etc/systemd/system/ollama.service
@@ -73,7 +82,7 @@ After=network-online.target
7382
7483
[Service]
7584
Type=exec
76-
ExecStart=/opt/ollama/ollama serve
85+
ExecStart=/usr/local/bin/ollama serve
7786
Environment=HOME=$HOME
7887
Environment=OLLAMA_INTEL_GPU=true
7988
Environment=OLLAMA_HOST=0.0.0.0
@@ -95,4 +104,4 @@ customize
95104
msg_info "Cleaning up"
96105
$STD apt-get -y autoremove
97106
$STD apt-get -y autoclean
98-
msg_ok "Cleaned"
107+
msg_ok "Cleaned"

0 commit comments

Comments
 (0)