@@ -13,6 +13,7 @@ CONTAINER_DB_PATH ?= /.llama/data/distributions/ansible-chatbot
 RAG_CONTENT_IMAGE ?= quay.io/ansible/aap-rag-content:1.0.1753876830
 LIGHTSPEED_STACK_CONFIG ?= lightspeed-stack.yaml
 LLAMA_STACK_RUN_CONFIG ?= ansible-chatbot-run.yaml
+SYSTEM_PROMPT ?= ansible-chatbot-system-prompt.txt
 # Colors for terminal output
 RED := \033[0;31m
 NC := \033[0m # No Color
@@ -132,12 +133,15 @@ run: check-env-run
 	-v ./vector_db/aap_faiss_store.db:$(CONTAINER_DB_PATH)/aap_faiss_store.db \
 	-v ./$(LIGHTSPEED_STACK_CONFIG):/.llama/distributions/ansible-chatbot/config/lightspeed-stack.yaml \
 	-v ./$(LLAMA_STACK_RUN_CONFIG):/.llama/distributions/llama-stack/config/ansible-chatbot-run.yaml \
-	-v ./ansible-chatbot-system-prompt.txt:/.llama/distributions/ansible-chatbot/system-prompts/default.txt \
+	-v ./$(SYSTEM_PROMPT):/.llama/distributions/ansible-chatbot/system-prompts/default.txt \
 	--env VLLM_URL=$(ANSIBLE_CHATBOT_VLLM_URL) \
 	--env VLLM_API_TOKEN=$(ANSIBLE_CHATBOT_VLLM_API_TOKEN) \
 	--env INFERENCE_MODEL=$(ANSIBLE_CHATBOT_INFERENCE_MODEL) \
 	--env INFERENCE_MODEL_FILTER=$(ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER) \
 	--env GEMINI_API_KEY=$(GEMINI_API_KEY) \
+	--env OPENAI_INFERENCE_MODEL=$(OPENAI_INFERENCE_MODEL) \
+	--env OPENAI_API_KEY=$(OPENAI_API_KEY) \
+	--env OPENAI_BASE_URL=$(OPENAI_BASE_URL) \
 	$(IMAGE_PREFIX)ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION)
 
 run-test:
@@ -171,12 +175,15 @@ run-local-db: check-env-run-local-db
 	-v ./vector_db/aap_faiss_store.db:$(CONTAINER_DB_PATH)/aap_faiss_store.db \
 	-v ./$(LIGHTSPEED_STACK_CONFIG):/.llama/distributions/ansible-chatbot/config/lightspeed-stack.yaml \
 	-v ./$(LLAMA_STACK_RUN_CONFIG):/.llama/distributions/llama-stack/config/ansible-chatbot-run.yaml \
-	-v ./ansible-chatbot-system-prompt.txt:/.llama/distributions/ansible-chatbot/system-prompts/default.txt \
+	-v ./$(SYSTEM_PROMPT):/.llama/distributions/ansible-chatbot/system-prompts/default.txt \
 	--env VLLM_URL=$(ANSIBLE_CHATBOT_VLLM_URL) \
 	--env VLLM_API_TOKEN=$(ANSIBLE_CHATBOT_VLLM_API_TOKEN) \
 	--env INFERENCE_MODEL=$(ANSIBLE_CHATBOT_INFERENCE_MODEL) \
 	--env INFERENCE_MODEL_FILTER=$(ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER) \
 	--env GEMINI_API_KEY=$(GEMINI_API_KEY) \
+	--env OPENAI_INFERENCE_MODEL=$(OPENAI_INFERENCE_MODEL) \
+	--env OPENAI_API_KEY=$(OPENAI_API_KEY) \
+	--env OPENAI_BASE_URL=$(OPENAI_BASE_URL) \
 	$(IMAGE_PREFIX)ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION)
 
 clean:
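Usage sketch (not part of the diff itself): since SYSTEM_PROMPT is declared with the conditional assignment `?=` and the OPENAI_* values are plain Make variables forwarded into the container via `--env`, all of them can be overridden on the `make` command line or exported in the environment before running the target. The variable names below come from the diff; the values are hypothetical placeholders.

	make run \
		SYSTEM_PROMPT=my-custom-system-prompt.txt \
		OPENAI_INFERENCE_MODEL=gpt-4o-mini \
		OPENAI_API_KEY=sk-example \
		OPENAI_BASE_URL=https://api.openai.com/v1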