Skip to content

Commit 574d37a

Browse files
authored
add mcp support and update paths (#73)
issue: https://issues.redhat.com/browse/AAP-50401 - add mcp support - update paths - Fixes llama-stack integration Note this is a first step, more work to be done to update and fix the container for a right dependency management with lightspeed-stack. Signed-off-by: Djebran Lezzoum <[email protected]>
1 parent a278659 commit 574d37a

10 files changed

+227
-46
lines changed

Containerfile

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,14 +29,16 @@ RUN uv sync --locked --no-install-project --no-dev
2929
# ======================================================
3030
# Final image without uv package manager
3131
# ------------------------------------------------------
32-
FROM quay.io/lightspeed-core/lightspeed-stack:dev-latest
32+
#FROM quay.io/lightspeed-core/lightspeed-stack:dev-latest
33+
# the latest tag was broken; replaced by the latest known-working image for now
34+
FROM quay.io/lightspeed-core/lightspeed-stack:dev-20250722-28abfaf
3335

3436
USER 0
3537

3638
# Re-declaring arguments without a value, inherits the global default one.
3739
ARG APP_ROOT
3840
ARG ANSIBLE_CHATBOT_VERSION
39-
RUN microdnf install -y --nodocs --setopt=keepcache=0 --setopt=tsflags=nodocs python3.11 jq
41+
RUN microdnf install -y --nodocs --setopt=keepcache=0 --setopt=tsflags=nodocs jq
4042

4143
# PYTHONDONTWRITEBYTECODE 1 : disable the generation of .pyc
4244
# PYTHONUNBUFFERED 1 : force the stdout and stderr streams to be unbuffered
@@ -61,8 +63,6 @@ ENV LLAMA_STACK_CONFIG_DIR=/.llama/data
6163
# Data and configuration
6264
RUN mkdir -p /.llama/distributions/ansible-chatbot
6365
RUN mkdir -p /.llama/data/distributions/ansible-chatbot
64-
ADD lightspeed-stack.yaml /.llama/distributions/ansible-chatbot
65-
ADD ansible-chatbot-run.yaml /.llama/distributions/ansible-chatbot
6666
RUN echo -e "\
6767
{\n\
6868
\"version\": \"${ANSIBLE_CHATBOT_VERSION}\" \n\

Makefile

Lines changed: 15 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,9 @@ ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER ?=
1010
LLAMA_STACK_PORT ?= 8321
1111
LOCAL_DB_PATH ?= .
1212
CONTAINER_DB_PATH ?= /.llama/data/distributions/ansible-chatbot
13-
# quay.io/ansible/aap-rag-content:latest does not work with lightspeed-stack:latest
14-
# aap-rag-content uses llama-stack:0.2.14 whereas lightspeed-stack:latest uses 0.2.13.
15-
RAG_CONTENT_IMAGE ?= quay.io/ansible/aap-rag-content:1.0.1751985495
13+
RAG_CONTENT_IMAGE ?= quay.io/ansible/aap-rag-content:latest
14+
LIGHTSPEED_STACK_CONFIG ?= lightspeed-stack.yaml
15+
LLAMA_STACK_RUN_CONFIG ?= ansible-chatbot-run.yaml
1616
# Colors for terminal output
1717
RED := \033[0;31m
1818
NC := \033[0m # No Color
@@ -23,6 +23,8 @@ NC := \033[0m # No Color
2323

2424
UV_HTTP_TIMEOUT=120
2525

26+
PLATFORM ?= "linux/amd64"
27+
2628
help:
2729
@echo "Makefile for Ansible Chatbot Stack"
2830
@echo "Available targets:"
@@ -61,7 +63,7 @@ setup-vector-db:
6163
@echo "Setting up vector db and embedding image..."
6264
rm -rf ./vector_db ./embeddings_model
6365
mkdir -p ./vector_db
64-
docker run -d --rm --name rag-content $(RAG_CONTENT_IMAGE) sleep infinity
66+
docker run --platform $(PLATFORM) -d --rm --name rag-content $(RAG_CONTENT_IMAGE) sleep infinity
6567
docker cp rag-content:/rag/llama_stack_vector_db/faiss_store.db.gz ./vector_db/aap_faiss_store.db.gz
6668
docker cp rag-content:/rag/embeddings_model .
6769
docker kill rag-content
@@ -76,8 +78,9 @@ check-env-build:
7678

7779
build: check-env-build
7880
@echo "Building customized Ansible Chatbot Stack image from lightspeed-core/lightspeed-stack..."
79-
docker build -f ./Containerfile \
81+
docker build --platform $(PLATFORM) -f ./Containerfile \
8082
--build-arg ANSIBLE_CHATBOT_VERSION=$(ANSIBLE_CHATBOT_VERSION) \
83+
--build-arg LLAMA_STACK_RUN_CONFIG=$(LLAMA_STACK_RUN_CONFIG) \
8184
-t ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION) .
8285
@printf "Custom image $(RED)ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION)$(NC) built successfully.\n"
8386

@@ -104,10 +107,11 @@ run: check-env-run
104107
@echo "Running Ansible Chatbot Stack container..."
105108
@echo "Using vLLM URL: $(ANSIBLE_CHATBOT_VLLM_URL)"
106109
@echo "Using inference model: $(ANSIBLE_CHATBOT_INFERENCE_MODEL)"
107-
docker run --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
110+
docker run --platform $(PLATFORM) --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
108111
-v ./embeddings_model:/.llama/data/embeddings_model \
109112
-v ./vector_db/aap_faiss_store.db:$(CONTAINER_DB_PATH)/aap_faiss_store.db \
110-
-v ./lightspeed-stack.yaml:/.llama/data/lightspeed-stack.yaml \
113+
-v ./$(LIGHTSPEED_STACK_CONFIG):/.llama/distributions/ansible-chatbot/config/lightspeed-stack.yaml \
114+
-v ./$(LLAMA_STACK_RUN_CONFIG):/.llama/distributions/llama-stack/config/ansible-chatbot-run.yaml \
111115
-v ./ansible-chatbot-system-prompt.txt:/.llama/distributions/ansible-chatbot/system-prompts/default.txt \
112116
--env VLLM_URL=$(ANSIBLE_CHATBOT_VLLM_URL) \
113117
--env VLLM_API_TOKEN=$(ANSIBLE_CHATBOT_VLLM_API_TOKEN) \
@@ -140,11 +144,13 @@ run-local-db: check-env-run-local-db
140144
@echo "Using inference model: $(ANSIBLE_CHATBOT_INFERENCE_MODEL)"
141145
@echo "Using inference model for tools filtering : $(ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER)"
142146
@echo "Mapping local DB from $(LOCAL_DB_PATH) to $(CONTAINER_DB_PATH)"
143-
docker run --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
147+
docker run --platform $(PLATFORM) --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
144148
-v $(LOCAL_DB_PATH):$(CONTAINER_DB_PATH) \
145149
-v ./embeddings_model:/app/embeddings_model \
146150
-v ./vector_db/aap_faiss_store.db:$(CONTAINER_DB_PATH)/aap_faiss_store.db \
147-
-v ./lightspeed-stack.yaml:/.llama/data/lightspeed-stack.yaml \
151+
-v ./$(LIGHTSPEED_STACK_CONFIG):/.llama/distributions/ansible-chatbot/config/lightspeed-stack.yaml \
152+
-v ./$(LLAMA_STACK_RUN_CONFIG):/.llama/distributions/llama-stack/config/ansible-chatbot-run.yaml \
153+
-v ./ansible-chatbot-system-prompt.txt:/.llama/distributions/ansible-chatbot/system-prompts/default.txt \
148154
--env VLLM_URL=$(ANSIBLE_CHATBOT_VLLM_URL) \
149155
--env VLLM_API_TOKEN=$(ANSIBLE_CHATBOT_VLLM_API_TOKEN) \
150156
--env INFERENCE_MODEL=$(ANSIBLE_CHATBOT_INFERENCE_MODEL) \

README.md

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -95,10 +95,13 @@ Builds the image `ansible-chatbot-stack:$ANSIBLE_CHATBOT_VERSION`.
9595
```commandline
9696
└── .llama/
9797
├── distributions/
98+
│ └── llama-stack/
99+
│ └── config
100+
│ └── ansible-chatbot-run.yaml
98101
│ └── ansible-chatbot/
99-
│ ├── lightspeed-stack.yaml
100-
── ansible-chatbot-run.yaml
101-
── ansible-chatbot-version-info.json
102+
│ ├── ansible-chatbot-version-info.json
103+
── config
104+
── lightspeed-stack.yaml
102105
│ └── system-prompts/
103106
│ └── default.txt
104107
└── providers.d

ansible-chatbot-mcp-run.yaml

Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,117 @@
1+
version: '2'
2+
image_name: ansible-chatbot
3+
container_image: ansible-chatbot
4+
apis:
5+
- inference
6+
- vector_io
7+
- safety
8+
- agents
9+
- datasetio
10+
- telemetry
11+
- tool_runtime
12+
providers:
13+
inference:
14+
- provider_id: my_rhoai_dev
15+
provider_type: remote::vllm
16+
config:
17+
url: ${env.VLLM_URL}
18+
max_tokens: ${env.VLLM_MAX_TOKENS:=4096}
19+
api_token: ${env.VLLM_API_TOKEN:=fake}
20+
tls_verify: ${env.VLLM_TLS_VERIFY:=true}
21+
- provider_id: inline_sentence-transformer
22+
provider_type: inline::sentence-transformers
23+
config: {}
24+
vector_io:
25+
- provider_id: aap_faiss
26+
provider_type: inline::faiss
27+
config:
28+
kvstore:
29+
type: sqlite
30+
namespace: null
31+
db_path: ${env.VECTOR_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/aap_faiss_store.db
32+
safety:
33+
- provider_id: llama-guard
34+
provider_type: inline::llama-guard
35+
config:
36+
excluded_categories: []
37+
agents:
38+
- provider_id: lightspeed_inline_agent
39+
provider_type: inline::lightspeed_inline_agent
40+
config:
41+
persistence_store:
42+
type: sqlite
43+
namespace: null
44+
db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/agents_store.db
45+
responses_store:
46+
type: sqlite
47+
namespace: null
48+
db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/responses_store.db
49+
tools_filter:
50+
enabled: true
51+
model_id: ${env.INFERENCE_MODEL_FILTER:=}
52+
datasetio:
53+
- provider_id: localfs
54+
provider_type: inline::localfs
55+
config:
56+
kvstore:
57+
type: sqlite
58+
namespace: null
59+
db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/localfs_datasetio.db
60+
telemetry:
61+
- provider_id: meta-reference
62+
provider_type: inline::meta-reference
63+
config:
64+
service_name: ${env.OTEL_SERVICE_NAME:=ansible-chatbot-stack}
65+
sinks: ${env.TELEMETRY_SINKS:=console,sqlite}
66+
sqlite_db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/trace_store.db
67+
tool_runtime:
68+
- provider_id: rag-runtime
69+
provider_type: inline::rag-runtime
70+
config: {}
71+
- provider_id: model-context-protocol
72+
provider_type: remote::model-context-protocol
73+
config: {}
74+
metadata_store:
75+
namespace: null
76+
type: sqlite
77+
db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/registry.db
78+
models:
79+
- metadata: {}
80+
model_id: ${env.INFERENCE_MODEL}
81+
provider_id: my_rhoai_dev
82+
provider_model_id: null
83+
- metadata:
84+
embedding_dimension: 768
85+
model_id: ${env.EMBEDDINGS_MODEL:=/.llama/data/distributions/ansible-chatbot/embeddings_model}
86+
provider_id: inline_sentence-transformer
87+
model_type: embedding
88+
shields: []
89+
vector_dbs:
90+
- metadata: {}
91+
vector_db_id: "aap-product-docs-2_5"
92+
embedding_model: ${env.EMBEDDINGS_MODEL:=/.llama/data/distributions/ansible-chatbot/embeddings_model}
93+
embedding_dimension: 768
94+
provider_id: "aap_faiss"
95+
datasets: []
96+
scoring_fns: []
97+
benchmarks: []
98+
tool_groups:
99+
- toolgroup_id: builtin::rag
100+
provider_id: rag-runtime
101+
- toolgroup_id: mcp::aap-controller
102+
provider_id: model-context-protocol
103+
mcp_endpoint:
104+
uri: http://localhost:8004/sse
105+
- toolgroup_id: mcp::aap-lightspeed
106+
provider_id: model-context-protocol
107+
mcp_endpoint:
108+
uri: http://localhost:8005/sse
109+
logging: null
110+
server:
111+
port: 8321
112+
tls_certfile: null
113+
tls_keyfile: null
114+
tls_cafile: null
115+
auth: null
116+
disable_ipv6: false
117+
external_providers_dir: ${env.EXTERNAL_PROVIDERS_DIR:=/.llama/providers.d}

ansible-chatbot-run.yaml

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,9 @@ providers:
1515
provider_type: remote::vllm
1616
config:
1717
url: ${env.VLLM_URL}
18-
max_tokens: ${env.VLLM_MAX_TOKENS:4096}
19-
api_token: ${env.VLLM_API_TOKEN:fake}
20-
tls_verify: ${env.VLLM_TLS_VERIFY:true}
18+
max_tokens: ${env.VLLM_MAX_TOKENS:=4096}
19+
api_token: ${env.VLLM_API_TOKEN:=fake}
20+
tls_verify: ${env.VLLM_TLS_VERIFY:=true}
2121
- provider_id: inline_sentence-transformer
2222
provider_type: inline::sentence-transformers
2323
config: {}
@@ -28,7 +28,7 @@ providers:
2828
kvstore:
2929
type: sqlite
3030
namespace: null
31-
db_path: ${env.VECTOR_DB_DIR:/.llama/data/distributions/ansible-chatbot}/aap_faiss_store.db
31+
db_path: ${env.VECTOR_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/aap_faiss_store.db
3232
safety:
3333
- provider_id: llama-guard
3434
provider_type: inline::llama-guard
@@ -41,29 +41,29 @@ providers:
4141
persistence_store:
4242
type: sqlite
4343
namespace: null
44-
db_path: ${env.PROVIDERS_DB_DIR:/.llama/data/distributions/ansible-chatbot}/agents_store.db
44+
db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/agents_store.db
4545
responses_store:
4646
type: sqlite
4747
namespace: null
48-
db_path: ${env.PROVIDERS_DB_DIR:/.llama/data/distributions/ansible-chatbot}/responses_store.db
48+
db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/responses_store.db
4949
tools_filter:
5050
enabled: true
51-
model_id: ${env.INFERENCE_MODEL_FILTER:}
51+
model_id: ${env.INFERENCE_MODEL_FILTER:=}
5252
datasetio:
5353
- provider_id: localfs
5454
provider_type: inline::localfs
5555
config:
5656
kvstore:
5757
type: sqlite
5858
namespace: null
59-
db_path: ${env.PROVIDERS_DB_DIR:/.llama/data/distributions/ansible-chatbot}/localfs_datasetio.db
59+
db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/localfs_datasetio.db
6060
telemetry:
6161
- provider_id: meta-reference
6262
provider_type: inline::meta-reference
6363
config:
64-
service_name: ${env.OTEL_SERVICE_NAME:}
65-
sinks: ${env.TELEMETRY_SINKS:console,sqlite}
66-
sqlite_db_path: ${env.PROVIDERS_DB_DIR:/.llama/data/distributions/ansible-chatbot}/trace_store.db
64+
service_name: ${env.OTEL_SERVICE_NAME:=ansible-chatbot-stack}
65+
sinks: ${env.TELEMETRY_SINKS:=console,sqlite}
66+
sqlite_db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/trace_store.db
6767
tool_runtime:
6868
- provider_id: rag-runtime
6969
provider_type: inline::rag-runtime
@@ -74,22 +74,22 @@ providers:
7474
metadata_store:
7575
namespace: null
7676
type: sqlite
77-
db_path: ${env.PROVIDERS_DB_DIR:/.llama/data/distributions/ansible-chatbot}/registry.db
77+
db_path: ${env.PROVIDERS_DB_DIR:=/.llama/data/distributions/ansible-chatbot}/registry.db
7878
models:
7979
- metadata: {}
8080
model_id: ${env.INFERENCE_MODEL}
8181
provider_id: my_rhoai_dev
8282
provider_model_id: null
8383
- metadata:
8484
embedding_dimension: 768
85-
model_id: ${env.EMBEDDINGS_MODEL:/.llama/data/distributions/ansible-chatbot/embeddings_model}
85+
model_id: ${env.EMBEDDINGS_MODEL:=/.llama/data/distributions/ansible-chatbot/embeddings_model}
8686
provider_id: inline_sentence-transformer
8787
model_type: embedding
8888
shields: []
8989
vector_dbs:
9090
- metadata: {}
9191
vector_db_id: "aap-product-docs-2_5"
92-
embedding_model: ${env.EMBEDDINGS_MODEL:/.llama/data/distributions/ansible-chatbot/embeddings_model}
92+
embedding_model: ${env.EMBEDDINGS_MODEL:=/.llama/data/distributions/ansible-chatbot/embeddings_model}
9393
embedding_dimension: 768
9494
provider_id: "aap_faiss"
9595
datasets: []
@@ -106,4 +106,4 @@ server:
106106
tls_cafile: null
107107
auth: null
108108
disable_ipv6: false
109-
external_providers_dir: ${env.EXTERNAL_PROVIDERS_DIR:/.llama/providers.d}
109+
external_providers_dir: ${env.EXTERNAL_PROVIDERS_DIR:=/.llama/providers.d}

entrypoint.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,4 +23,4 @@ else
2323
fi
2424
fi
2525

26-
python3.12 /app-root/src/lightspeed_stack.py --config /.llama/distributions/ansible-chatbot/lightspeed-stack.yaml
26+
python3.12 /app-root/src/lightspeed_stack.py --config /.llama/distributions/ansible-chatbot/config/lightspeed-stack.yaml

lightspeed-stack-mcp.yaml

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
name: Ansible Lightspeed Intelligent Assistant
2+
service:
3+
host: 0.0.0.0
4+
port: 8080
5+
auth_enabled: false
6+
workers: 1
7+
color_log: true
8+
access_log: true
9+
llama_stack:
10+
use_as_library_client: true
11+
library_client_config_path: /.llama/distributions/llama-stack/config/ansible-chatbot-run.yaml
12+
user_data_collection:
13+
feedback_disabled: true
14+
transcripts_disabled: true
15+
customization:
16+
system_prompt_path: /.llama/distributions/ansible-chatbot/system-prompts/default.txt
17+
mcp_servers:
18+
- name: mcp::aap-controller
19+
provider_id: model-context-protocol
20+
url: http://localhost:8004/sse
21+
- name: mcp::aap-lightspeed
22+
provider_id: model-context-protocol
23+
url: http://localhost:8005/sse

lightspeed-stack.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ service:
88
access_log: true
99
llama_stack:
1010
use_as_library_client: true
11-
library_client_config_path: /.llama/distributions/ansible-chatbot/ansible-chatbot-run.yaml
11+
library_client_config_path: /.llama/distributions/llama-stack/config/ansible-chatbot-run.yaml
1212
user_data_collection:
1313
feedback_disabled: true
1414
transcripts_disabled: true

pyproject.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,8 @@ dependencies = [
88
"aiosqlite~=0.21.0",
99
"faiss-cpu~=1.11.0",
1010
"fire~=0.7.0",
11-
"lightspeed-stack-providers~=0.1.9",
11+
"lightspeed-stack-providers~=0.1.10",
12+
"llama-stack==0.2.14",
1213
"mcp~=1.9.4",
1314
"numpy==2.2.6",
1415
"opentelemetry-api~=1.34.1",

0 commit comments

Comments
 (0)