Skip to content

Commit 76504a7

Browse files
authored
Support bypass tools option on lightspeed-stack /streaming_query API (#1737)
* Support bypass tools option on lightspeed-stack /streaming_query API
* npm audit fix
* Added a comment to a test case
* Revert provider name for Granite used in on-prem mode
* Hide gemini-2.5-pro for now
* Remove a comment line
1 parent 3982b32 commit 76504a7

File tree

23 files changed

+271
-540
lines changed

23 files changed

+271
-540
lines changed

aap_chatbot/package-lock.json

Lines changed: 14 additions & 266 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

aap_chatbot/src/AnsibleChatbot/AnsibleChatbot.tsx

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -108,6 +108,8 @@ export const AnsibleChatbot: React.FunctionComponent<ChatbotContext> = (
108108
hasStopButton,
109109
handleStopButton,
110110
isStreamingSupported,
111+
bypassTools,
112+
setBypassTools,
111113
} = useChatbot();
112114
const [chatbotVisible, setChatbotVisible] = useState<boolean>(true);
113115
const [displayMode] = useState<ChatbotDisplayMode>(
@@ -226,6 +228,8 @@ export const AnsibleChatbot: React.FunctionComponent<ChatbotContext> = (
226228
<SystemPromptModal
227229
systemPrompt={systemPrompt}
228230
setSystemPrompt={setSystemPrompt}
231+
bypassTools={bypassTools}
232+
setBypassTools={setBypassTools}
229233
/>
230234
)}
231235
</ChatbotHeaderActions>

aap_chatbot/src/App.test.tsx

Lines changed: 34 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@ import React from "react";
88
// vitest-browser-react documentation
99
/* eslint-disable testing-library/prefer-screen-queries */
1010
/* eslint-disable no-nested-ternary */
11-
1211
import { assert, beforeEach, expect, test, vi } from "vitest";
1312
import { render } from "vitest-browser-react";
1413
import { MemoryRouter } from "react-router-dom";
@@ -709,6 +708,40 @@ test("Test system prompt override", async () => {
709708
);
710709
});
711710

711+
test("Test system prompt override with no_tools option", async () => {
712+
const spy = mockAxios(200);
713+
await renderApp(true);
714+
715+
await expect.element(page.getByLabelText("SystemPrompt")).toBeVisible();
716+
const systemPromptIcon = page.getByLabelText("SystemPrompt");
717+
await systemPromptIcon.click();
718+
719+
const systemPromptTextArea = page.getByLabelText(
720+
"system-prompt-form-text-area",
721+
);
722+
await systemPromptTextArea.fill("MY SYSTEM PROMPT WITH NO_TOOLS OPTION");
723+
724+
const bypassToolsCheckbox = page.getByRole("checkbox");
725+
expect(bypassToolsCheckbox).not.toBeChecked();
726+
await bypassToolsCheckbox.click();
727+
expect(bypassToolsCheckbox).toBeChecked();
728+
729+
const systemPromptButton = page.getByLabelText("system-prompt-form-button");
730+
await systemPromptButton.click();
731+
732+
await sendMessage("Hello with system prompt override with no_tools option");
733+
expect(spy).toHaveBeenCalledWith(
734+
expect.anything(),
735+
expect.objectContaining({
736+
conversation_id: undefined,
737+
no_tools: true,
738+
query: "Hello with system prompt override with no_tools option",
739+
system_prompt: "MY SYSTEM PROMPT WITH NO_TOOLS OPTION",
740+
}),
741+
expect.anything(),
742+
);
743+
});
744+
712745
test("Chat streaming test", async () => {
713746
let ghIssueLinkSpy = 0;
714747
let ghIssueUrl = "";

aap_chatbot/src/SystemPromptModal/SystemPromptModal.tsx

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import React from "react";
22
import {
33
Button,
4+
Checkbox,
45
Form,
56
FormGroup,
67
Modal,
@@ -15,13 +16,15 @@ import WrenchIcon from "@patternfly/react-icons/dist/esm/icons/wrench-icon";
1516
interface SystemPromptModalProps {
1617
systemPrompt: string;
1718
setSystemPrompt: (s: string) => void;
19+
bypassTools: boolean;
20+
setBypassTools: (b: boolean) => void;
1821
}
1922

2023
export const SystemPromptModal: React.FunctionComponent<
2124
SystemPromptModalProps
2225
> = (props) => {
2326
const [isModalOpen, setModalOpen] = React.useState(false);
24-
const { systemPrompt, setSystemPrompt } = props;
27+
const { systemPrompt, setSystemPrompt, bypassTools, setBypassTools } = props;
2528

2629
const handleModalToggle = (_event: KeyboardEvent | React.MouseEvent) => {
2730
setModalOpen(!isModalOpen);
@@ -31,6 +34,10 @@ export const SystemPromptModal: React.FunctionComponent<
3134
setSystemPrompt(value);
3235
};
3336

37+
const handleBypassToolsChange = (_event: any, value: boolean) => {
38+
setBypassTools(value);
39+
};
40+
3441
return (
3542
<React.Fragment>
3643
<Button
@@ -64,6 +71,13 @@ export const SystemPromptModal: React.FunctionComponent<
6471
aria-label="system-prompt-form-text-area"
6572
rows={15}
6673
/>
74+
<Checkbox
75+
id="bypass-tools"
76+
label="Bypass Tools"
77+
isChecked={bypassTools}
78+
aria-label="bypass-tools-checkbox"
79+
onChange={handleBypassToolsChange}
80+
></Checkbox>
6781
</FormGroup>
6882
</Form>
6983
</ModalBody>

aap_chatbot/src/types/Message.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ type LLMRequest = {
1010
attachments?: object[] | null;
1111
system_prompt?: string | null;
1212
media_type?: "text/plain" | "application/json";
13+
no_tools?: boolean | null;
1314
};
1415

1516
type LLMResponse = {

aap_chatbot/src/useChatbot/useChatbot.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -182,6 +182,7 @@ export const useChatbot = () => {
182182
const [systemPrompt, setSystemPrompt] = useState(QUERY_SYSTEM_INSTRUCTION);
183183
const [hasStopButton, setHasStopButton] = useState<boolean>(false);
184184
const [abortController, setAbortController] = useState(new AbortController());
185+
const [bypassTools, setBypassTools] = useState<boolean>(false);
185186

186187
const [stream, setStream] = useState(false);
187188
useEffect(() => {
@@ -465,6 +466,9 @@ export const useChatbot = () => {
465466
if (systemPrompt !== QUERY_SYSTEM_INSTRUCTION) {
466467
chatRequest.system_prompt = systemPrompt;
467468
}
469+
if (bypassTools) {
470+
chatRequest.no_tools = true;
471+
}
468472

469473
if (inDebugMode()) {
470474
for (const m of modelsSupported) {
@@ -656,5 +660,7 @@ export const useChatbot = () => {
656660
hasStopButton,
657661
handleStopButton,
658662
isStreamingSupported,
663+
bypassTools,
664+
setBypassTools,
659665
};
660666
};

ansible_ai_connect/ai/api/model_pipelines/http/pipelines.py

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
import json
1616
import logging
1717
from json import JSONDecodeError
18-
from typing import AsyncGenerator
18+
from typing import Any, AsyncGenerator
1919

2020
import aiohttp
2121
import requests
@@ -179,8 +179,9 @@ def invoke(self, params: ChatBotParameters) -> ChatBotResponse:
179179
provider = params.provider
180180
model_id = params.model_id
181181
system_prompt = params.system_prompt or settings.CHATBOT_DEFAULT_SYSTEM_PROMPT
182+
no_tools = params.no_tools
182183

183-
data = {
184+
data: dict[str, Any] = {
184185
"query": query,
185186
"model": model_id,
186187
"provider": provider,
@@ -189,6 +190,8 @@ def invoke(self, params: ChatBotParameters) -> ChatBotResponse:
189190
data["conversation_id"] = str(conversation_id)
190191
if system_prompt:
191192
data["system_prompt"] = str(system_prompt)
193+
if no_tools:
194+
data["no_tools"] = bool(no_tools)
192195

193196
headers = self.headers or {}
194197
if params.mcp_headers:
@@ -281,8 +284,9 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
281284
model_id = params.model_id
282285
system_prompt = params.system_prompt or settings.CHATBOT_DEFAULT_SYSTEM_PROMPT
283286
media_type = params.media_type
287+
no_tools = params.no_tools
284288

285-
data = {
289+
data: dict[str, Any] = {
286290
"query": query,
287291
"model": model_id,
288292
"provider": provider,
@@ -293,6 +297,8 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
293297
data["system_prompt"] = str(system_prompt)
294298
if media_type:
295299
data["media_type"] = str(media_type)
300+
if no_tools:
301+
data["no_tools"] = bool(no_tools)
296302

297303
async with session.post(
298304
self.config.inference_url + "/v1/streaming_query",
@@ -315,6 +321,7 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
315321
ev.provider_id = params.provider
316322
ev.conversation_id = params.conversation_id
317323
ev.modelName = params.model_id
324+
ev.no_tools = params.no_tools
318325

319326
async for chunk in response.content:
320327
try:
@@ -332,9 +339,9 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
332339
logger.error(
333340
"An error received in chat streaming content:"
334341
+ " response="
335-
+ data.get("response")
342+
+ str(data.get("response"))
336343
+ ", cause="
337-
+ data.get("cause")
344+
+ str(data.get("cause"))
338345
)
339346
elif event == "start":
340347
ev.phase = event

ansible_ai_connect/ai/api/model_pipelines/http/tests/test_pipelines.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -147,6 +147,7 @@ def get_params(self) -> StreamingChatBotParameters:
147147
conversation_id=None,
148148
system_prompt="You are a helpful assistant",
149149
media_type="application/json",
150+
no_tools=False, # Do not bypass tool callings
150151
)
151152

152153
def send_event(self, ev):

ansible_ai_connect/ai/api/model_pipelines/llamastack/tests/test_pipelines.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -344,6 +344,7 @@ def get_params(self) -> StreamingChatBotParameters:
344344
conversation_id=None,
345345
system_prompt="",
346346
media_type="application/json",
347+
no_tools=False,
347348
)
348349

349350
@patch("llama_stack_client.lib.agents.agent.AsyncAgent.create_session")

ansible_ai_connect/ai/api/model_pipelines/pipelines.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -232,6 +232,7 @@ class ChatBotParameters:
232232
conversation_id: Optional[str]
233233
system_prompt: str
234234
mcp_headers: Optional[dict[str, dict[str, str]]] = field(kw_only=True, default=None)
235+
no_tools: bool
235236

236237
@classmethod
237238
def init(
@@ -242,6 +243,7 @@ def init(
242243
conversation_id: Optional[str] = None,
243244
system_prompt: Optional[str] = None,
244245
mcp_headers: Optional[dict[str, dict[str, str]]] = None,
246+
no_tools: Optional[bool] = False,
245247
):
246248
return cls(
247249
query=query,
@@ -250,6 +252,7 @@ def init(
250252
conversation_id=conversation_id,
251253
system_prompt=system_prompt,
252254
mcp_headers=mcp_headers,
255+
no_tools=no_tools,
253256
)
254257

255258

@@ -270,6 +273,7 @@ def init(
270273
system_prompt: Optional[str] = None,
271274
media_type: Optional[str] = None,
272275
mcp_headers: Optional[dict[str, dict[str, str]]] = None,
276+
no_tools: Optional[bool] = False,
273277
):
274278
return cls(
275279
query=query,
@@ -279,6 +283,7 @@ def init(
279283
system_prompt=system_prompt,
280284
media_type=media_type,
281285
mcp_headers=mcp_headers,
286+
no_tools=no_tools,
282287
)
283288

284289

0 commit comments

Comments (0)