Skip to content

Commit f888e5e

Browse files
Merge pull request #419 from guruswarupa/ollama
Ollama setup and a utility to run models locally
2 parents 4c43143 + 4e3db9e commit f888e5e

File tree

2 files changed

+213
-0
lines changed

2 files changed

+213
-0
lines changed

tabs/utils/ollama.sh

Lines changed: 209 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,209 @@
1+
#!/bin/sh -e
2+
3+
. ../common-script.sh
4+
5+
# Ensure ollama is present: install via the upstream script and start
# the systemd service only when the binary is not already on PATH.
installollama() {
    clear
    printf "%b\n" "${YELLOW}Checking if ollama is already installed...${RC}"

    if ! command_exists ollama; then
        printf "%b\n" "${YELLOW}Installing ollama...${RC}"
        curl -fsSL https://ollama.com/install.sh | sh
        $ESCALATION_TOOL systemctl start ollama
    else
        printf "%b\n" "${GREEN}ollama is already installed.${RC}"
    fi
}
18+
19+
# Print every model currently available on this system.
list_models() {
    clear
    printf "%b\n" "${YELLOW}Listing all models available on your system...${RC}"
    ollama list
}
24+
25+
# Show detailed information for a model the user names interactively.
show_model_info() {
    clear
    list_models
    printf "%b\n" "${YELLOW}Enter the name of the model you want to show information for (e.g., llama3.1):${RC}"
    read -r model_name

    printf "%b\n" "${YELLOW}Showing information for model '$model_name'...${RC}"
    ollama show "$model_name"
}
34+
35+
# Function to display available models
# Each catalog entry is printed verbatim, one per line, in menu order;
# the leading number is what select_model() maps to a model tag.
display_models() {
    clear
    printf "%b\n" "${RED}Available Models${RC}"
    for model_entry in \
        "1. Llama 3.1 - 8B (4.7GB)" \
        "2. Llama 3.1 - 70B (40GB)" \
        "3. Llama 3.1 - 405B (231GB)" \
        "4. Phi 3 Mini - 3.8B (2.3GB)" \
        "5. Phi 3 Medium - 14B (7.9GB)" \
        "6. Gemma 2 - 2B (1.6GB)" \
        "7. Gemma 2 - 9B (5.5GB)" \
        "8. Gemma 2 - 27B (16GB)" \
        "9. Mistral - 7B (4.1GB)" \
        "10. Moondream 2 - 1.4B (829MB)" \
        "11. Neural Chat - 7B (4.1GB)" \
        "12. Starling - 7B (4.1GB)" \
        "13. Code Llama - 7B (3.8GB)" \
        "14. Llama 2 Uncensored - 7B (3.8GB)" \
        "15. LLaVA - 7B (4.5GB)" \
        "16. Solar - 10.7B (6.1GB)"
    do
        printf "%s\n" "$model_entry"
    done
}
56+
57+
# Function to select model based on user input
# Maps a menu number (1-16, as shown by display_models) to its ollama
# model tag; any other input is echoed back unchanged so users can
# supply an arbitrary/custom model name.
select_model() {
    # FIX: 'local' is not POSIX and the shebang is /bin/sh (ShellCheck
    # SC3043); use "$1" directly instead of copying it into a local.
    case "$1" in
        1) echo "llama3.1";;
        2) echo "llama3.1:70b";;
        3) echo "llama3.1:405b";;
        4) echo "phi3";;
        5) echo "phi3:medium";;
        6) echo "gemma2:2b";;
        7) echo "gemma2";;
        8) echo "gemma2:27b";;
        9) echo "mistral";;
        10) echo "moondream";;
        11) echo "neural-chat";;
        12) echo "starling-lm";;
        13) echo "codellama";;
        14) echo "llama2-uncensored";;
        15) echo "llava";;
        16) echo "solar";;
        *) echo "$1";; # Treat any other input as a custom model name
    esac
}
80+
81+
# Show the catalog and installed models, then run the model the user
# picks (menu number or custom name, resolved via select_model).
run_model() {
    clear
    display_models

    printf "%b\n" "${GREEN}Installed Models${RC}"
    installed_models=$(ollama list)
    printf "%b\n" "${installed_models}"

    # FIX: removed dead `custom_models=$(ollama list | grep 'custom-model-prefix')`
    # and its orphan "Custom Models" header. The variable was never used,
    # and under the shebang's `sh -e` a no-match grep makes the assignment
    # return non-zero, which aborted the whole script.

    printf "%b\n" "${YELLOW}Please select a model to run:${RC}"
    printf "%b\n" "${YELLOW}Enter the number corresponding to the model or enter the name of a custom model:${RC}"

    read -r model_choice

    model=$(select_model "$model_choice")

    printf "%b\n" "${YELLOW}Running the model: $model...${RC}"
    ollama run "$model"
}
103+
104+
# Interactively build a customized model: pick a base model, pull it,
# write a Modelfile with the chosen temperature and system message,
# and register it under a user-supplied name via `ollama create`.
create_model() {
    clear
    printf "%b\n" "${YELLOW}Let's create a new model in Ollama!${RC}"
    display_models

    # Prompt for base model
    printf "%b\n" "${YELLOW}Enter the base model (e.g. '13' for codellama):${RC}"
    read -r base_model

    # Resolve a menu number (or pass through a custom name) to the real tag
    model=$(select_model "$base_model")

    # FIX: message said "Running" but this pulls the model
    printf "%b\n" "${YELLOW}Pulling the model: $model...${RC}"
    ollama pull "$model"

    # Prompt for custom model name
    printf "%b\n" "${YELLOW}Enter a name for the new customized model:${RC}"
    read -r custom_model_name

    # Prompt for temperature setting
    printf "%b\n" "${YELLOW}Enter the desired temperature (higher values are more creative, lower values are more coherent, e.g., 1):${RC}"
    read -r temperature
    # Default to 1 when the user just presses Enter (the original wrapped
    # this in a redundant `if [ -z ... ]`; ${var:-default} already covers it)
    temperature=${temperature:-1}

    # Prompt for system message
    printf "%b\n" "${YELLOW}Enter the system message for the model customization (e.g., 'You are Mario from Super Mario Bros. Answer as Mario, the assistant, only.'):${RC}"
    read -r system_message

    # Create the Modelfile
    printf "%b\n" "${YELLOW}Creating the Modelfile...${RC}"
    # FIX: FROM must reference the resolved tag ($model), not the raw menu
    # selection ($base_model) — e.g. input '13' previously produced
    # "FROM 13" instead of "FROM codellama".
    cat << EOF > Modelfile
FROM $model

# set the temperature to $temperature
PARAMETER temperature $temperature

# set the system message
SYSTEM """
$system_message
"""
EOF

    # Create the model in Ollama
    printf "%b\n" "${YELLOW}Creating the model in Ollama...${RC}"
    ollama create "$custom_model_name" -f Modelfile
    # Clean up the temporary Modelfile left in the working directory
    rm -f Modelfile
    printf "%b\n" "${GREEN}Model '$custom_model_name' created successfully.${RC}"
}
153+
154+
# Function to remove a model
# Removes an installed model by name; exits non-zero when the name
# does not appear in `ollama list` output.
remove_model() {
    clear
    printf "%b\n" "${GREEN}Installed Models${RC}"
    installed_models=$(ollama list)
    printf "%b\n" "${installed_models}"

    printf "%b\n" "${YELLOW}Please select a model to remove:${RC}"
    printf "%b\n" "${YELLOW}Enter the name of the model you want to remove:${RC}"

    read -r model_to_remove

    # FIX: match literally (-F, with -- to guard a leading dash) — model
    # tags contain '.' and ':' which are regex metacharacters, so plain
    # `grep -q "$model_to_remove"` could mis-match.
    if printf "%s\n" "$installed_models" | grep -qF -- "$model_to_remove"; then
        printf "%b\n" "${YELLOW}Removing the model: $model_to_remove...${RC}"
        ollama rm "$model_to_remove"
        printf "%b\n" "${GREEN}Model '$model_to_remove' has been removed.${RC}"
    else
        printf "%b\n" "${RED}Model '$model_to_remove' is not installed. Exiting.${RC}"
        exit 1
    fi
}
175+
176+
# Interactive main loop: dispatch to the model-management functions
# until the user chooses Exit (option 6).
menu() {
    while true; do
        clear
        printf "%b\n" "${YELLOW}Please select an option:${RC}"
        printf "1) List all models\n"
        printf "2) Show model information\n"
        printf "3) Create a new model\n"
        printf "4) Run a model\n"
        printf "5) Remove a model\n"
        printf "6) Exit\n"

        # FIX: there are six menu entries — prompt said "(1-5)"
        printf "%b" "${YELLOW}Enter your choice (1-6): ${RC}"
        read -r choice

        case $choice in
            1) list_models ;;
            2) show_model_info ;;
            3) create_model ;;
            4) run_model ;;
            5) remove_model ;;
            6) printf "%b\n" "${GREEN}Exiting...${RC}"; exit 0 ;;
            *) printf "%b\n" "${RED}Invalid choice. Please try again.${RC}" ;;
        esac

        printf "%b\n" "${YELLOW}Press Enter to continue...${RC}"
        read -r dummy
    done
}
204+
205+
checkEnv
206+
checkEscalationTool
207+
installollama
208+
menu
209+

tabs/utils/tab_data.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,10 @@ script = "bluetooth-control.sh"
1212
name = "Numlock on Startup"
1313
script = "numlock.sh"
1414

15+
[[data]]
16+
name = "Ollama"
17+
script = "ollama.sh"
18+
1519
[[data]]
1620
name = "Service Manager"
1721
script = "service-control.sh"

0 commit comments

Comments
 (0)