Skip to content

Commit 94bda0f

Browse files
authored
AAP-48373: llama-stack: Tidy-up ansible-chatbot-stack once LSC supports required features (#57)
1 parent 61a5d7d commit 94bda0f

12 files changed

+87
-230
lines changed
File renamed without changes.

Containerfile

Lines changed: 59 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,62 @@
11
# Build arguments declared in the global scope.
2-
ARG ANSIBLE_CHATBOT_BASE_IMAGE=ansible-chatbot-stack-base
32
ARG ANSIBLE_CHATBOT_VERSION=latest
4-
ARG LLAMA_STACK_VERSION=0.2.9
53

6-
FROM ${ANSIBLE_CHATBOT_BASE_IMAGE}:${LLAMA_STACK_VERSION}
4+
# ======================================================
5+
# Transient image to construct Python venv
6+
# ------------------------------------------------------
7+
FROM registry.access.redhat.com/ubi9/ubi-minimal AS builder
8+
9+
ARG APP_ROOT=/app-root
10+
RUN microdnf install -y --nodocs --setopt=keepcache=0 --setopt=tsflags=nodocs \
11+
python3.12 python3.12-devel python3.12-pip
12+
WORKDIR /app-root
13+
14+
# UV_PYTHON_DOWNLOADS=0 : Disable Python interpreter downloads and use the system interpreter.
15+
ENV UV_COMPILE_BYTECODE=0 \
16+
UV_LINK_MODE=copy \
17+
UV_PYTHON_DOWNLOADS=0
18+
19+
# Install uv package manager
20+
RUN pip3.12 install uv
21+
22+
# Add explicit files and directories
23+
# (avoid accidental inclusion of local directories or env files or credentials)
24+
COPY pyproject.toml uv.lock LICENSE.md README.md ./
25+
26+
RUN uv sync --locked --no-install-project --no-dev
27+
# ======================================================
28+
29+
# ======================================================
30+
# Final image without uv package manager
31+
# ------------------------------------------------------
32+
FROM quay.io/lightspeed-core/lightspeed-stack:dev-latest
33+
34+
USER 0
735

836
# Re-declaring arguments without a value, inherits the global default one.
937
ARG ANSIBLE_CHATBOT_VERSION
38+
ARG APP_ROOT=/app-root
39+
RUN microdnf install -y --nodocs --setopt=keepcache=0 --setopt=tsflags=nodocs python3.11 jq
40+
WORKDIR /app-root
41+
42+
# PYTHONDONTWRITEBYTECODE 1 : disable the generation of .pyc
43+
# PYTHONUNBUFFERED 1 : force the stdout and stderr streams to be unbuffered
44+
# PYTHONCOERCECLOCALE 0, PYTHONUTF8 1 : skip legacy locales and use UTF-8 mode
45+
ENV PYTHONDONTWRITEBYTECODE=1 \
46+
PYTHONUNBUFFERED=1 \
47+
PYTHONCOERCECLOCALE=0 \
48+
PYTHONUTF8=1 \
49+
PYTHONIOENCODING=UTF-8 \
50+
LANG=en_US.UTF-8
51+
52+
COPY --from=builder --chown=1001:1001 /app-root /app-root
53+
54+
# this directory is checked by ecosystem-cert-preflight-checks task in Konflux
55+
COPY --from=builder /app-root/LICENSE.md /licenses/
56+
57+
# Add executables from .venv to system PATH
58+
ENV PATH="/app-root/.venv/bin:$PATH"
59+
1060
ENV LLAMA_STACK_CONFIG_DIR=/.llama/data
1161

1262
# Data and configuration
@@ -18,14 +68,15 @@ RUN echo -e "\
1868
\"version\": \"${ANSIBLE_CHATBOT_VERSION}\" \n\
1969
}\n\
2070
" > /.llama/distributions/ansible-chatbot/ansible-chatbot-version-info.json
71+
ADD llama-stack/providers.d /.llama/providers.d
2172
RUN chmod -R g+rw /.llama
2273

2374
# Bootstrap
24-
RUN mkdir -p /.llama/temp
25-
ADD entrypoint.sh /.llama/temp
26-
RUN chmod +x /.llama/temp/entrypoint.sh
75+
ADD entrypoint.sh /.llama
76+
RUN chmod +x /.llama/entrypoint.sh
2777

2878
# See https://github.com/meta-llama/llama-stack/issues/1633
29-
# USER 1000
79+
# USER 1001
3080

31-
ENTRYPOINT ["/.llama/temp/entrypoint.sh"]
81+
ENTRYPOINT ["/.llama/entrypoint.sh"]
82+
# ======================================================

Makefile

Lines changed: 24 additions & 76 deletions
Original file line numberDiff line numberDiff line change
@@ -7,11 +7,12 @@ ANSIBLE_CHATBOT_VLLM_URL ?=
77
ANSIBLE_CHATBOT_VLLM_API_TOKEN ?=
88
ANSIBLE_CHATBOT_INFERENCE_MODEL ?=
99
ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER ?=
10-
AAP_GATEWAY_TOKEN ?=
1110
LLAMA_STACK_PORT ?= 8321
1211
LOCAL_DB_PATH ?= .
1312
CONTAINER_DB_PATH ?= /.llama/data/distributions/ansible-chatbot
14-
RAG_CONTENT_IMAGE ?= quay.io/ansible/aap-rag-content:latest
13+
# quay.io/ansible/aap-rag-content:latest does not work with lightspeed-stack:latest
14+
# aap-rag-content uses llama-stack:0.2.14 whereas lightspeed-stack:latest uses 0.2.13.
15+
RAG_CONTENT_IMAGE ?= quay.io/ansible/aap-rag-content:1.0.1751985495
1516
# Colors for terminal output
1617
RED := \033[0;31m
1718
NC := \033[0m # No Color
@@ -20,9 +21,6 @@ NC := \033[0m # No Color
2021

2122
.EXPORT_ALL_VARIABLES:
2223

23-
PYPI_VERSION=$(shell cat requirements.txt | grep llama-stack== | cut -c 14-)
24-
LLAMA_STACK_VERSION=$(PYPI_VERSION)
25-
LLAMA_STACK_LOGGING="server=debug;core=info"
2624
UV_HTTP_TIMEOUT=120
2725

2826
help:
@@ -32,38 +30,31 @@ help:
3230
@echo " all - Run all steps (setup, build, build-custom)"
3331
@echo " setup - Sets up llama-stack and the external lightspeed providers"
3432
@echo " setup-vector-db - Sets up vector DB and embedding model"
35-
@echo " build - Build the base Ansible Chatbot Stack image"
36-
@echo " build-custom - Build the customized Ansible Chatbot Stack image"
37-
@echo " build-lsc - Build the customized Ansible Chatbot Stack image from lightspeed-core/lightspeed-stack"
38-
@echo " run - Run the Ansible Chatbot Stack container"
33+
@echo " build - Build the customized Ansible Chatbot Stack image from lightspeed-core/lightspeed-stack"
34+
@echo " run - Run the Ansible Chatbot Stack container built with 'build-lsc'"
35+
@echo " run-test - Run some sanity checks for the Ansible Chatbot Stack container built with 'build-lsc'"
3936
@echo "  run-local-db     - Run the Ansible Chatbot Stack container with local DB mapped to container DB"
40-
@echo " run-lsc - Run the Ansible Chatbot Stack container built with 'build-lsc'"
41-
@echo " run-test-lsc - Run some sanity checks for the Ansible Chatbot Stack container built with 'build-lsc'"
4237
@echo " clean - Clean up generated files and Docker images"
4338
@echo " deploy-k8s - Deploy to Kubernetes cluster"
4439
@echo " shell - Get a shell in the container"
4540
@echo " tag-and-push - Tag and push the container image to quay.io"
4641
@echo ""
4742
@echo "Required Environment variables:"
48-
@echo " ANSIBLE_CHATBOT_VERSION - Version tag for the image (default: $(ANSIBLE_CHATBOT_VERSION))"
49-
@echo " ANSIBLE_CHATBOT_VLLM_URL - URL for the vLLM inference provider"
50-
@echo " ANSIBLE_CHATBOT_VLLM_API_TOKEN - API token for the vLLM inference provider"
51-
@echo " ANSIBLE_CHATBOT_INFERENCE_MODEL - Inference model to use"
43+
@echo " ANSIBLE_CHATBOT_VERSION - Version tag for the image (default: $(ANSIBLE_CHATBOT_VERSION))"
44+
@echo " ANSIBLE_CHATBOT_VLLM_URL - URL for the vLLM inference provider"
45+
@echo " ANSIBLE_CHATBOT_VLLM_API_TOKEN - API token for the vLLM inference provider"
46+
@echo " ANSIBLE_CHATBOT_INFERENCE_MODEL - Inference model to use"
5247
@echo " ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER - Inference model to use for tools filtering"
53-
@echo "  AAP_GATEWAY_TOKEN  - API token for the AAP Gateway"
54-
@echo " CONTAINER_DB_PATH - Path to the container database (default: $(CONTAINER_DB_PATH))"
55-
@echo " LOCAL_DB_PATH - Path to the local database (default: $(LOCAL_DB_PATH))"
56-
@echo " LLAMA_STACK_PORT - Port to expose (default: $(LLAMA_STACK_PORT))"
57-
@echo " QUAY_ORG - Quay organization name (default: $(QUAY_ORG))"
48+
@echo " CONTAINER_DB_PATH - Path to the container database (default: $(CONTAINER_DB_PATH))"
49+
@echo " LOCAL_DB_PATH - Path to the local database (default: $(LOCAL_DB_PATH))"
50+
@echo " LLAMA_STACK_PORT - Port to expose (default: $(LLAMA_STACK_PORT))"
51+
@echo " QUAY_ORG - Quay organization name (default: $(QUAY_ORG))"
5852

5953
setup: setup-vector-db
6054
@echo "Setting up environment..."
61-
python3 -m venv venv
62-
. venv/bin/activate && pip install -r requirements.txt
55+
uv sync
6356
mkdir -p llama-stack/providers.d/inline/agents/
64-
mkdir -p llama-stack/providers.d/remote/tool_runtime/
6557
curl -o llama-stack/providers.d/inline/agents/lightspeed_inline_agent.yaml https://raw.githubusercontent.com/lightspeed-core/lightspeed-providers/refs/heads/main/resources/external_providers/inline/agents/lightspeed_inline_agent.yaml
66-
curl -o llama-stack/providers.d/remote/tool_runtime/lightspeed.yaml https://raw.githubusercontent.com/lightspeed-core/lightspeed-providers/refs/heads/main/resources/external_providers/remote/tool_runtime/lightspeed.yaml
6758
@echo "Environment setup complete."
6859

6960
setup-vector-db:
@@ -76,36 +67,16 @@ setup-vector-db:
7667
docker kill rag-content
7768
gzip -d ./vector_db/aap_faiss_store.db.gz
7869

79-
build:
80-
@echo "Building base Ansible Chatbot Stack image..."
81-
. venv/bin/activate && \
82-
llama stack build --config ansible-chatbot-build.yaml --image-type container
83-
@printf "Base image $(RED)ansible-chatbot-stack-base$(NC) built successfully.\n"
84-
85-
# Pre-check required environment variables for build-custom
86-
check-env-build-custom:
87-
@if [ -z "$(ANSIBLE_CHATBOT_VERSION)" ]; then \
88-
printf "$(RED)Error: ANSIBLE_CHATBOT_VERSION is required but not set$(NC)\n"; \
89-
exit 1; \
90-
fi
91-
92-
build-custom: check-env-build-custom
93-
@echo "Building customized Ansible Chatbot Stack image..."
94-
docker build -f Containerfile -t ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION) \
95-
--build-arg ANSIBLE_CHATBOT_VERSION=$(ANSIBLE_CHATBOT_VERSION) \
96-
--build-arg LLAMA_STACK_VERSION=$(LLAMA_STACK_VERSION) .
97-
@printf "Custom image $(RED)ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION)$(NC) built successfully.\n"
98-
99-
# Pre-check required environment variables for build-lsc
100-
check-env-build-lsc:
70+
# Pre-check required environment variables for build
71+
check-env-build:
10172
@if [ -z "$(ANSIBLE_CHATBOT_VERSION)" ]; then \
10273
printf "$(RED)Error: ANSIBLE_CHATBOT_VERSION is required but not set$(NC)\n"; \
10374
exit 1; \
10475
fi
10576

106-
build-lsc: check-env-build-lsc
77+
build: check-env-build
10778
@echo "Building customized Ansible Chatbot Stack image from lightspeed-core/lightspeed-stack..."
108-
docker build -f ./lightspeed-stack/Containerfile.lsc \
79+
docker build -f ./Containerfile \
10980
--build-arg ANSIBLE_CHATBOT_VERSION=$(ANSIBLE_CHATBOT_VERSION) \
11081
-t ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION) .
11182
@printf "Custom image $(RED)ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION)$(NC) built successfully.\n"
@@ -128,42 +99,22 @@ check-env-run:
12899
printf "$(RED)Error: ANSIBLE_CHATBOT_VERSION is required but not set$(NC)\n"; \
129100
exit 1; \
130101
fi
131-
@if [ -z "$(AAP_GATEWAY_TOKEN)" ]; then \
132-
printf "$(RED)Error: AAP_GATEWAY_TOKEN is required but not set$(NC)\n"; \
133-
exit 1; \
134-
fi
135102

136103
run: check-env-run
137-
@echo "Running Ansible Chatbot Stack container..."
138-
@echo "Using vLLM URL: $(ANSIBLE_CHATBOT_VLLM_URL)"
139-
@echo "Using inference model: $(ANSIBLE_CHATBOT_INFERENCE_MODEL)"
140-
docker run --security-opt label=disable -it -p $(LLAMA_STACK_PORT):$(LLAMA_STACK_PORT) \
141-
-v ./embeddings_model:/app/embeddings_model \
142-
-v ./vector_db/aap_faiss_store.db:$(CONTAINER_DB_PATH)/aap_faiss_store.db \
143-
--env LLAMA_STACK_PORT=$(LLAMA_STACK_PORT) \
144-
--env VLLM_URL=$(ANSIBLE_CHATBOT_VLLM_URL) \
145-
--env VLLM_API_TOKEN=$(ANSIBLE_CHATBOT_VLLM_API_TOKEN) \
146-
--env INFERENCE_MODEL=$(ANSIBLE_CHATBOT_INFERENCE_MODEL) \
147-
--env INFERENCE_MODEL_FILTER=$(ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER) \
148-
--env AAP_GATEWAY_TOKEN=$(AAP_GATEWAY_TOKEN) \
149-
ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION)
150-
151-
run-lsc: check-env-run
152104
@echo "Running Ansible Chatbot Stack container..."
153105
@echo "Using vLLM URL: $(ANSIBLE_CHATBOT_VLLM_URL)"
154106
@echo "Using inference model: $(ANSIBLE_CHATBOT_INFERENCE_MODEL)"
155107
docker run --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
156108
-v ./embeddings_model:/.llama/data/embeddings_model \
157109
-v ./vector_db/aap_faiss_store.db:$(CONTAINER_DB_PATH)/aap_faiss_store.db \
158-
-v ./lightspeed-stack/lightspeed-stack.yaml:/.llama/data/lightspeed-stack.yaml \
110+
-v ./lightspeed-stack.yaml:/.llama/data/lightspeed-stack.yaml \
159111
--env VLLM_URL=$(ANSIBLE_CHATBOT_VLLM_URL) \
160112
--env VLLM_API_TOKEN=$(ANSIBLE_CHATBOT_VLLM_API_TOKEN) \
161113
--env INFERENCE_MODEL=$(ANSIBLE_CHATBOT_INFERENCE_MODEL) \
162114
--env INFERENCE_MODEL_FILTER=$(ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER) \
163-
--env AAP_GATEWAY_TOKEN=$(AAP_GATEWAY_TOKEN) \
164115
ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION)
165116

166-
run-test-lsc:
117+
run-test:
167118
@echo "Running test query against lightspeed-core/lightspeed-stack's /config endpoint..."
168119
curl -X GET http://localhost:$(LLAMA_STACK_PORT)/v1/config | jq .
169120
@echo "Running test query against lightspeed-core/lightspeed-stack's /models endpoint..."
@@ -188,16 +139,15 @@ run-local-db: check-env-run-local-db
188139
@echo "Using inference model: $(ANSIBLE_CHATBOT_INFERENCE_MODEL)"
189140
@echo "Using inference model for tools filtering : $(ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER)"
190141
@echo "Mapping local DB from $(LOCAL_DB_PATH) to $(CONTAINER_DB_PATH)"
191-
docker run --security-opt label=disable -it -p $(LLAMA_STACK_PORT):$(LLAMA_STACK_PORT) \
142+
docker run --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
192143
-v $(LOCAL_DB_PATH):$(CONTAINER_DB_PATH) \
193144
-v ./embeddings_model:/app/embeddings_model \
194145
-v ./vector_db/aap_faiss_store.db:$(CONTAINER_DB_PATH)/aap_faiss_store.db \
195-
--env LLAMA_STACK_PORT=$(LLAMA_STACK_PORT) \
146+
-v ./lightspeed-stack.yaml:/.llama/data/lightspeed-stack.yaml \
196147
--env VLLM_URL=$(ANSIBLE_CHATBOT_VLLM_URL) \
197148
--env VLLM_API_TOKEN=$(ANSIBLE_CHATBOT_VLLM_API_TOKEN) \
198149
--env INFERENCE_MODEL=$(ANSIBLE_CHATBOT_INFERENCE_MODEL) \
199150
--env INFERENCE_MODEL_FILTER=$(ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER) \
200-
--env AAP_GATEWAY_TOKEN=$(AAP_GATEWAY_TOKEN) \
201151
ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION)
202152

203153
clean:
@@ -207,9 +157,7 @@ clean:
207157
rm -rf providers.d/
208158
@echo "Removing ansible-chatbot-stack images..."
209159
docker rmi -f $$(docker images -a -q --filter reference=ansible-chatbot-stack) || true
210-
@echo "Removing ansible-chatbot-stack-base image..."
211-
docker rmi -f $$(docker images -a -q --filter reference=ansible-chatbot-stack-base) || true
212-
@echo "Removing ansible-chatbot-stack-base containers..."
160+
@echo "Removing ansible-chatbot-stack containers..."
213161
docker rm -f $$(docker ps -a -q --filter ancestor=ansible-chatbot-stack) || true
214162
@echo "Clean-up complete."
215163

ansible-chatbot-build.yaml

Lines changed: 0 additions & 29 deletions
This file was deleted.

ansible-chatbot-run.yaml

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -68,10 +68,9 @@ providers:
6868
- provider_id: rag-runtime
6969
provider_type: inline::rag-runtime
7070
config: {}
71-
- provider_id: lightspeed
72-
provider_type: remote::lightspeed
73-
config:
74-
api_key: ${env.AAP_GATEWAY_TOKEN:}
71+
- provider_id: model-context-protocol
72+
provider_type: remote::model-context-protocol
73+
config: {}
7574
metadata_store: null
7675
models:
7776
- metadata: {}

entrypoint.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,4 +23,4 @@ else
2323
fi
2424
fi
2525

26-
python -m llama_stack.distribution.server.server --config /.llama/distributions/ansible-chatbot/ansible-chatbot-run.yaml
26+
python3.12 src/lightspeed_stack.py --config /.llama/data/lightspeed-stack.yaml
File renamed without changes.

0 commit comments

Comments
 (0)