Skip to content

Commit b93fe70

Browse files
committed
Support bypass tools option on lightspeed-stack /streaming_query API
1 parent 7347ea3 commit b93fe70

File tree

17 files changed

+222
-4
lines changed

17 files changed

+222
-4
lines changed

aap_chatbot/src/AnsibleChatbot/AnsibleChatbot.tsx

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -108,6 +108,8 @@ export const AnsibleChatbot: React.FunctionComponent<ChatbotContext> = (
108108
hasStopButton,
109109
handleStopButton,
110110
isStreamingSupported,
111+
bypassTools,
112+
setBypassTools,
111113
} = useChatbot();
112114
const [chatbotVisible, setChatbotVisible] = useState<boolean>(true);
113115
const [displayMode] = useState<ChatbotDisplayMode>(
@@ -226,6 +228,8 @@ export const AnsibleChatbot: React.FunctionComponent<ChatbotContext> = (
226228
<SystemPromptModal
227229
systemPrompt={systemPrompt}
228230
setSystemPrompt={setSystemPrompt}
231+
bypassTools={bypassTools}
232+
setBypassTools={setBypassTools}
229233
/>
230234
)}
231235
</ChatbotHeaderActions>

aap_chatbot/src/App.test.tsx

Lines changed: 34 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@ import React from "react";
88
// vitest-browser-react documentation
99
/* eslint-disable testing-library/prefer-screen-queries */
1010
/* eslint-disable no-nested-ternary */
11-
1211
import { assert, beforeEach, expect, test, vi } from "vitest";
1312
import { render } from "vitest-browser-react";
1413
import { MemoryRouter } from "react-router-dom";
@@ -709,6 +708,40 @@ test("Test system prompt override", async () => {
709708
);
710709
});
711710

711+
test("Test system prompt override with no_tools option", async () => {
712+
const spy = mockAxios(200);
713+
await renderApp(true);
714+
715+
await expect.element(page.getByLabelText("SystemPrompt")).toBeVisible();
716+
const systemPromptIcon = page.getByLabelText("SystemPrompt");
717+
await systemPromptIcon.click();
718+
719+
const systemPromptTextArea = page.getByLabelText(
720+
"system-prompt-form-text-area",
721+
);
722+
await systemPromptTextArea.fill("MY SYSTEM PROMPT WITH NO_TOOLS OPTION");
723+
724+
const bypassToolsCheckbox = page.getByRole("checkbox");
725+
expect(bypassToolsCheckbox).not.toBeChecked();
726+
await bypassToolsCheckbox.click();
727+
expect(bypassToolsCheckbox).toBeChecked();
728+
729+
const systemPromptButton = page.getByLabelText("system-prompt-form-button");
730+
await systemPromptButton.click();
731+
732+
await sendMessage("Hello with system prompt override with no_tools option");
733+
expect(spy).toHaveBeenCalledWith(
734+
expect.anything(),
735+
expect.objectContaining({
736+
conversation_id: undefined,
737+
no_tools: true,
738+
query: "Hello with system prompt override with no_tools option",
739+
system_prompt: "MY SYSTEM PROMPT WITH NO_TOOLS OPTION",
740+
}),
741+
expect.anything(),
742+
);
743+
});
744+
712745
test("Chat streaming test", async () => {
713746
let ghIssueLinkSpy = 0;
714747
let ghIssueUrl = "";

aap_chatbot/src/SystemPromptModal/SystemPromptModal.tsx

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import React from "react";
22
import {
33
Button,
4+
Checkbox,
45
Form,
56
FormGroup,
67
Modal,
@@ -15,13 +16,15 @@ import WrenchIcon from "@patternfly/react-icons/dist/esm/icons/wrench-icon";
1516
interface SystemPromptModalProps {
1617
systemPrompt: string;
1718
setSystemPrompt: (s: string) => void;
19+
bypassTools: boolean;
20+
setBypassTools: (b: boolean) => void;
1821
}
1922

2023
export const SystemPromptModal: React.FunctionComponent<
2124
SystemPromptModalProps
2225
> = (props) => {
2326
const [isModalOpen, setModalOpen] = React.useState(false);
24-
const { systemPrompt, setSystemPrompt } = props;
27+
const { systemPrompt, setSystemPrompt, bypassTools, setBypassTools } = props;
2528

2629
const handleModalToggle = (_event: KeyboardEvent | React.MouseEvent) => {
2730
setModalOpen(!isModalOpen);
@@ -31,6 +34,10 @@ export const SystemPromptModal: React.FunctionComponent<
3134
setSystemPrompt(value);
3235
};
3336

37+
const handleBypassToolsChange = (_event: any, value: boolean) => {
38+
setBypassTools(value);
39+
};
40+
3441
return (
3542
<React.Fragment>
3643
<Button
@@ -64,6 +71,13 @@ export const SystemPromptModal: React.FunctionComponent<
6471
aria-label="system-prompt-form-text-area"
6572
rows={15}
6673
/>
74+
<Checkbox
75+
id="bypass-tools"
76+
label="Bypass Tools"
77+
isChecked={bypassTools}
78+
aria-label="bypass-tools-checkbox"
79+
onChange={handleBypassToolsChange}
80+
></Checkbox>
6781
</FormGroup>
6882
</Form>
6983
</ModalBody>

aap_chatbot/src/types/Message.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ type LLMRequest = {
1010
attachments?: object[] | null;
1111
system_prompt?: string | null;
1212
media_type?: "text/plain" | "application/json";
13+
no_tools?: boolean | null;
1314
};
1415

1516
type LLMResponse = {

aap_chatbot/src/useChatbot/useChatbot.ts

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,9 @@ const botName =
3333
ANSIBLE_LIGHTSPEED_PRODUCT_NAME;
3434

3535
export const modelsSupported: LLMModel[] = [
36-
{ model: "granite-3.3-8b-instruct", provider: "rhoai" },
36+
{ model: "granite-3.3-8b-instruct", provider: "my_rhoai_dev" },
37+
{ model: "gemini/gemini-2.5-flash", provider: "gemini" },
38+
{ model: "gemini/gemini-2.5-pro", provider: "gemini" },
3739
];
3840

3941
export const readCookie = (name: string): string | null => {
@@ -182,6 +184,7 @@ export const useChatbot = () => {
182184
const [systemPrompt, setSystemPrompt] = useState(QUERY_SYSTEM_INSTRUCTION);
183185
const [hasStopButton, setHasStopButton] = useState<boolean>(false);
184186
const [abortController, setAbortController] = useState(new AbortController());
187+
const [bypassTools, setBypassTools] = useState<boolean>(false);
185188

186189
const [stream, setStream] = useState(false);
187190
useEffect(() => {
@@ -465,6 +468,9 @@ export const useChatbot = () => {
465468
if (systemPrompt !== QUERY_SYSTEM_INSTRUCTION) {
466469
chatRequest.system_prompt = systemPrompt;
467470
}
471+
if (bypassTools) {
472+
chatRequest.no_tools = true;
473+
}
468474

469475
if (inDebugMode()) {
470476
for (const m of modelsSupported) {
@@ -656,5 +662,7 @@ export const useChatbot = () => {
656662
hasStopButton,
657663
handleStopButton,
658664
isStreamingSupported,
665+
bypassTools,
666+
setBypassTools,
659667
};
660668
};

ansible_ai_connect/ai/api/model_pipelines/http/pipelines.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -179,6 +179,7 @@ def invoke(self, params: ChatBotParameters) -> ChatBotResponse:
179179
provider = params.provider
180180
model_id = params.model_id
181181
system_prompt = params.system_prompt or settings.CHATBOT_DEFAULT_SYSTEM_PROMPT
182+
no_tools = params.no_tools
182183

183184
data = {
184185
"query": query,
@@ -189,6 +190,8 @@ def invoke(self, params: ChatBotParameters) -> ChatBotResponse:
189190
data["conversation_id"] = str(conversation_id)
190191
if system_prompt:
191192
data["system_prompt"] = str(system_prompt)
193+
if no_tools:
194+
data["no_tools"] = bool(no_tools)
192195

193196
headers = self.headers or {}
194197
if params.mcp_headers:
@@ -281,6 +284,7 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
281284
model_id = params.model_id
282285
system_prompt = params.system_prompt or settings.CHATBOT_DEFAULT_SYSTEM_PROMPT
283286
media_type = params.media_type
287+
no_tools = params.no_tools
284288

285289
data = {
286290
"query": query,
@@ -293,6 +297,8 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
293297
data["system_prompt"] = str(system_prompt)
294298
if media_type:
295299
data["media_type"] = str(media_type)
300+
if no_tools:
301+
data["no_tools"] = bool(no_tools)
296302

297303
async with session.post(
298304
self.config.inference_url + "/v1/streaming_query",
@@ -315,6 +321,7 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
315321
ev.provider_id = params.provider
316322
ev.conversation_id = params.conversation_id
317323
ev.modelName = params.model_id
324+
ev.no_tools = params.no_tools
318325

319326
async for chunk in response.content:
320327
try:

ansible_ai_connect/ai/api/model_pipelines/pipelines.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -232,6 +232,7 @@ class ChatBotParameters:
232232
conversation_id: Optional[str]
233233
system_prompt: str
234234
mcp_headers: Optional[dict[str, dict[str, str]]] = field(kw_only=True, default=None)
235+
no_tools: bool
235236

236237
@classmethod
237238
def init(
@@ -242,6 +243,7 @@ def init(
242243
conversation_id: Optional[str] = None,
243244
system_prompt: Optional[str] = None,
244245
mcp_headers: Optional[dict[str, dict[str, str]]] = None,
246+
no_tools: Optional[bool] = False,
245247
):
246248
return cls(
247249
query=query,
@@ -250,6 +252,7 @@ def init(
250252
conversation_id=conversation_id,
251253
system_prompt=system_prompt,
252254
mcp_headers=mcp_headers,
255+
no_tools=no_tools,
253256
)
254257

255258

@@ -270,6 +273,7 @@ def init(
270273
system_prompt: Optional[str] = None,
271274
media_type: Optional[str] = None,
272275
mcp_headers: Optional[dict[str, dict[str, str]]] = None,
276+
no_tools: Optional[bool] = False,
273277
):
274278
return cls(
275279
query=query,
@@ -279,6 +283,7 @@ def init(
279283
system_prompt=system_prompt,
280284
media_type=media_type,
281285
mcp_headers=mcp_headers,
286+
no_tools=no_tools,
282287
)
283288

284289

ansible_ai_connect/ai/api/serializers.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -345,6 +345,11 @@ class ChatRequestSerializer(serializers.Serializer):
345345
label="System prompt",
346346
help_text=("An optional non-default system prompt to be used on LLM (debug mode only)."),
347347
)
348+
no_tools = serializers.BooleanField(
349+
required=False,
350+
label="Bypass tools",
351+
help_text=("Whether to bypass all tools and MCP servers"),
352+
)
348353

349354

350355
class StreamingChatRequestSerializer(ChatRequestSerializer):

ansible_ai_connect/ai/api/telemetry/schema1.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -212,6 +212,7 @@ class ChatBotBaseEvent(Schema1Event):
212212
converter=str,
213213
default=settings.CHATBOT_DEFAULT_PROVIDER,
214214
)
215+
no_tools: bool = field(validator=validators.instance_of(bool), converter=bool, default=False)
215216

216217
def __attrs_post_init__(self):
217218
self.chat_prompt = anonymize_struct(self.chat_prompt)

ansible_ai_connect/ai/api/tests/test_chat_view.py

Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -99,6 +99,11 @@ class TestChatView(APIVersionTestCaseBase, WisdomServiceAPITestCaseBase):
9999
"system_prompt": "System prompt override",
100100
}
101101

102+
PAYLOAD_WITH_NO_TOOLS_OPTION = {
103+
"query": "Payload with no tools option",
104+
"no_tools": True,
105+
}
106+
102107
JSON_RESPONSE = {
103108
"response": "AAP 2.5 introduces an updated, unified UI.",
104109
"conversation_id": "123e4567-e89b-12d3-a456-426614174000",
@@ -169,6 +174,7 @@ def json(self):
169174
elif (
170175
kwargs["json"]["query"] == TestChatView.PAYLOAD_WITH_MODEL_AND_PROVIDER["query"]
171176
or kwargs["json"]["query"] == TestChatView.PAYLOAD_WITH_SYSTEM_PROMPT_OVERRIDE["query"]
177+
or kwargs["json"]["query"] == TestChatView.PAYLOAD_WITH_NO_TOOLS_OPTION["query"]
172178
):
173179
status_code = 200
174180
json_response["response"] = input
@@ -438,6 +444,40 @@ def test_operational_telemetry_with_system_prompt_override(self):
438444
segment_events[0]["properties"]["chat_system_prompt"],
439445
TestChatView.PAYLOAD_WITH_SYSTEM_PROMPT_OVERRIDE["system_prompt"],
440446
)
447+
self.assertFalse(segment_events[0]["properties"]["no_tools"])
448+
449+
@override_settings(SEGMENT_WRITE_KEY="DUMMY_KEY_VALUE")
450+
def test_operational_telemetry_with_no_tools_option(self):
451+
self.user.rh_user_has_seat = True
452+
self.user.organization = Organization.objects.get_or_create(id=1)[0]
453+
self.client.force_authenticate(user=self.user)
454+
with (
455+
patch.object(
456+
apps.get_app_config("ai"),
457+
"get_model_pipeline",
458+
Mock(
459+
return_value=HttpChatBotPipeline(
460+
mock_pipeline_config("http", model_id="granite-8b")
461+
)
462+
),
463+
),
464+
self.assertLogs(logger="root", level="DEBUG") as log,
465+
):
466+
r = self.query_with_no_error(TestChatView.PAYLOAD_WITH_NO_TOOLS_OPTION)
467+
self.assertEqual(r.status_code, HTTPStatus.OK)
468+
segment_events = self.extractSegmentEventsFromLog(log)
469+
# Verify that chat_prompt is excluded (not in allow list)
470+
self.assertNotIn("chat_prompt", segment_events[0]["properties"])
471+
self.assertEqual(segment_events[0]["properties"]["modelName"], "granite-8b")
472+
self.assertIn(
473+
TestChatView.PAYLOAD_WITH_NO_TOOLS_OPTION["query"],
474+
segment_events[0]["properties"]["chat_response"],
475+
)
476+
self.assertEqual(
477+
segment_events[0]["properties"]["chat_truncated"],
478+
TestChatView.JSON_RESPONSE["truncated"],
479+
)
480+
self.assertTrue(segment_events[0]["properties"]["no_tools"])
441481

442482
def test_chat_rate_limit(self):
443483
# Call chat API five times using self.user
@@ -737,6 +777,37 @@ def test_operational_telemetry_with_system_prompt_override(self):
737777
segment_events[0]["properties"]["chat_system_prompt"],
738778
TestChatView.PAYLOAD_WITH_SYSTEM_PROMPT_OVERRIDE["system_prompt"],
739779
)
780+
self.assertFalse(segment_events[0]["properties"]["no_tools"])
781+
782+
@override_settings(SEGMENT_WRITE_KEY="DUMMY_KEY_VALUE")
783+
def test_operational_telemetry_with_no_tools_option(self):
784+
self.user.rh_user_has_seat = True
785+
self.user.organization = Organization.objects.get_or_create(id=1)[0]
786+
self.client.force_authenticate(user=self.user)
787+
with (
788+
patch.object(
789+
apps.get_app_config("ai"),
790+
"get_model_pipeline",
791+
Mock(
792+
return_value=HttpStreamingChatBotPipeline(
793+
mock_pipeline_config("http", model_id="granite-8b")
794+
)
795+
),
796+
),
797+
self.assertLogs(logger="root", level="DEBUG") as log,
798+
):
799+
r = self.query_with_no_error(TestChatView.PAYLOAD_WITH_NO_TOOLS_OPTION)
800+
self.assertEqual(r.status_code, HTTPStatus.OK)
801+
segment_events = self.extractSegmentEventsFromLog(log)
802+
# Verify that chat_prompt is excluded (not in allow list)
803+
self.assertNotIn("chat_prompt", segment_events[0]["properties"])
804+
self.assertEqual(segment_events[0]["properties"]["modelName"], "granite-8b")
805+
self.assertEqual(
806+
segment_events[0]["properties"]["chat_truncated"],
807+
TestChatView.JSON_RESPONSE["truncated"],
808+
)
809+
self.assertEqual(len(segment_events[0]["properties"]["chat_referenced_documents"]), 0)
810+
self.assertTrue(segment_events[0]["properties"]["no_tools"])
740811

741812
def test_chat_rate_limit(self):
742813
# Call chat API five times using self.user

0 commit comments

Comments (0)