Skip to content

Commit dae0254

Browse files
committed
Support the bypass-tools (no_tools) option on the lightspeed-stack /streaming_query API
1 parent 7347ea3 commit dae0254

File tree

19 files changed

+224
-4
lines changed

19 files changed

+224
-4
lines changed

aap_chatbot/src/AnsibleChatbot/AnsibleChatbot.tsx

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -108,6 +108,8 @@ export const AnsibleChatbot: React.FunctionComponent<ChatbotContext> = (
108108
hasStopButton,
109109
handleStopButton,
110110
isStreamingSupported,
111+
bypassTools,
112+
setBypassTools,
111113
} = useChatbot();
112114
const [chatbotVisible, setChatbotVisible] = useState<boolean>(true);
113115
const [displayMode] = useState<ChatbotDisplayMode>(
@@ -226,6 +228,8 @@ export const AnsibleChatbot: React.FunctionComponent<ChatbotContext> = (
226228
<SystemPromptModal
227229
systemPrompt={systemPrompt}
228230
setSystemPrompt={setSystemPrompt}
231+
bypassTools={bypassTools}
232+
setBypassTools={setBypassTools}
229233
/>
230234
)}
231235
</ChatbotHeaderActions>

aap_chatbot/src/App.test.tsx

Lines changed: 34 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@ import React from "react";
88
// vitest-browser-react documentation
99
/* eslint-disable testing-library/prefer-screen-queries */
1010
/* eslint-disable no-nested-ternary */
11-
1211
import { assert, beforeEach, expect, test, vi } from "vitest";
1312
import { render } from "vitest-browser-react";
1413
import { MemoryRouter } from "react-router-dom";
@@ -709,6 +708,40 @@ test("Test system prompt override", async () => {
709708
);
710709
});
711710

711+
test("Test system prompt override with no_tools option", async () => {
712+
const spy = mockAxios(200);
713+
await renderApp(true);
714+
715+
await expect.element(page.getByLabelText("SystemPrompt")).toBeVisible();
716+
const systemPromptIcon = page.getByLabelText("SystemPrompt");
717+
await systemPromptIcon.click();
718+
719+
const systemPromptTextArea = page.getByLabelText(
720+
"system-prompt-form-text-area",
721+
);
722+
await systemPromptTextArea.fill("MY SYSTEM PROMPT WITH NO_TOOLS OPTION");
723+
724+
const bypassToolsCheckbox = page.getByRole("checkbox");
725+
expect(bypassToolsCheckbox).not.toBeChecked();
726+
await bypassToolsCheckbox.click();
727+
expect(bypassToolsCheckbox).toBeChecked();
728+
729+
const systemPromptButton = page.getByLabelText("system-prompt-form-button");
730+
await systemPromptButton.click();
731+
732+
await sendMessage("Hello with system prompt override with no_tools option");
733+
expect(spy).toHaveBeenCalledWith(
734+
expect.anything(),
735+
expect.objectContaining({
736+
conversation_id: undefined,
737+
no_tools: true,
738+
query: "Hello with system prompt override with no_tools option",
739+
system_prompt: "MY SYSTEM PROMPT WITH NO_TOOLS OPTION",
740+
}),
741+
expect.anything(),
742+
);
743+
});
744+
712745
test("Chat streaming test", async () => {
713746
let ghIssueLinkSpy = 0;
714747
let ghIssueUrl = "";

aap_chatbot/src/SystemPromptModal/SystemPromptModal.tsx

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import React from "react";
22
import {
33
Button,
4+
Checkbox,
45
Form,
56
FormGroup,
67
Modal,
@@ -15,13 +16,15 @@ import WrenchIcon from "@patternfly/react-icons/dist/esm/icons/wrench-icon";
1516
interface SystemPromptModalProps {
1617
systemPrompt: string;
1718
setSystemPrompt: (s: string) => void;
19+
bypassTools: boolean;
20+
setBypassTools: (b: boolean) => void;
1821
}
1922

2023
export const SystemPromptModal: React.FunctionComponent<
2124
SystemPromptModalProps
2225
> = (props) => {
2326
const [isModalOpen, setModalOpen] = React.useState(false);
24-
const { systemPrompt, setSystemPrompt } = props;
27+
const { systemPrompt, setSystemPrompt, bypassTools, setBypassTools } = props;
2528

2629
const handleModalToggle = (_event: KeyboardEvent | React.MouseEvent) => {
2730
setModalOpen(!isModalOpen);
@@ -31,6 +34,10 @@ export const SystemPromptModal: React.FunctionComponent<
3134
setSystemPrompt(value);
3235
};
3336

37+
const handleBypassToolsChange = (_event: any, value: boolean) => {
38+
setBypassTools(value);
39+
};
40+
3441
return (
3542
<React.Fragment>
3643
<Button
@@ -64,6 +71,13 @@ export const SystemPromptModal: React.FunctionComponent<
6471
aria-label="system-prompt-form-text-area"
6572
rows={15}
6673
/>
74+
<Checkbox
75+
id="bypass-tools"
76+
label="Bypass Tools"
77+
isChecked={bypassTools}
78+
aria-label="bypass-tools-checkbox"
79+
onChange={handleBypassToolsChange}
80+
></Checkbox>
6781
</FormGroup>
6882
</Form>
6983
</ModalBody>

aap_chatbot/src/types/Message.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ type LLMRequest = {
1010
attachments?: object[] | null;
1111
system_prompt?: string | null;
1212
media_type?: "text/plain" | "application/json";
13+
no_tools?: boolean | null;
1314
};
1415

1516
type LLMResponse = {

aap_chatbot/src/useChatbot/useChatbot.ts

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,9 @@ const botName =
3333
ANSIBLE_LIGHTSPEED_PRODUCT_NAME;
3434

3535
export const modelsSupported: LLMModel[] = [
36-
{ model: "granite-3.3-8b-instruct", provider: "rhoai" },
36+
{ model: "granite-3.3-8b-instruct", provider: "my_rhoai_dev" },
37+
{ model: "gemini/gemini-2.5-flash", provider: "gemini" },
38+
{ model: "gemini/gemini-2.5-pro", provider: "gemini" },
3739
];
3840

3941
export const readCookie = (name: string): string | null => {
@@ -182,6 +184,7 @@ export const useChatbot = () => {
182184
const [systemPrompt, setSystemPrompt] = useState(QUERY_SYSTEM_INSTRUCTION);
183185
const [hasStopButton, setHasStopButton] = useState<boolean>(false);
184186
const [abortController, setAbortController] = useState(new AbortController());
187+
const [bypassTools, setBypassTools] = useState<boolean>(false);
185188

186189
const [stream, setStream] = useState(false);
187190
useEffect(() => {
@@ -465,6 +468,9 @@ export const useChatbot = () => {
465468
if (systemPrompt !== QUERY_SYSTEM_INSTRUCTION) {
466469
chatRequest.system_prompt = systemPrompt;
467470
}
471+
if (bypassTools) {
472+
chatRequest.no_tools = true;
473+
}
468474

469475
if (inDebugMode()) {
470476
for (const m of modelsSupported) {
@@ -656,5 +662,7 @@ export const useChatbot = () => {
656662
hasStopButton,
657663
handleStopButton,
658664
isStreamingSupported,
665+
bypassTools,
666+
setBypassTools,
659667
};
660668
};

ansible_ai_connect/ai/api/model_pipelines/http/pipelines.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -179,6 +179,7 @@ def invoke(self, params: ChatBotParameters) -> ChatBotResponse:
179179
provider = params.provider
180180
model_id = params.model_id
181181
system_prompt = params.system_prompt or settings.CHATBOT_DEFAULT_SYSTEM_PROMPT
182+
no_tools = params.no_tools
182183

183184
data = {
184185
"query": query,
@@ -189,6 +190,8 @@ def invoke(self, params: ChatBotParameters) -> ChatBotResponse:
189190
data["conversation_id"] = str(conversation_id)
190191
if system_prompt:
191192
data["system_prompt"] = str(system_prompt)
193+
if no_tools:
194+
data["no_tools"] = bool(no_tools)
192195

193196
headers = self.headers or {}
194197
if params.mcp_headers:
@@ -281,6 +284,7 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
281284
model_id = params.model_id
282285
system_prompt = params.system_prompt or settings.CHATBOT_DEFAULT_SYSTEM_PROMPT
283286
media_type = params.media_type
287+
no_tools = params.no_tools
284288

285289
data = {
286290
"query": query,
@@ -293,6 +297,8 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
293297
data["system_prompt"] = str(system_prompt)
294298
if media_type:
295299
data["media_type"] = str(media_type)
300+
if no_tools:
301+
data["no_tools"] = bool(no_tools)
296302

297303
async with session.post(
298304
self.config.inference_url + "/v1/streaming_query",
@@ -315,6 +321,7 @@ async def async_invoke(self, params: StreamingChatBotParameters) -> AsyncGenerat
315321
ev.provider_id = params.provider
316322
ev.conversation_id = params.conversation_id
317323
ev.modelName = params.model_id
324+
ev.no_tools = params.no_tools
318325

319326
async for chunk in response.content:
320327
try:

ansible_ai_connect/ai/api/model_pipelines/http/tests/test_pipelines.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -147,6 +147,7 @@ def get_params(self) -> StreamingChatBotParameters:
147147
conversation_id=None,
148148
system_prompt="You are a helpful assistant",
149149
media_type="application/json",
150+
no_tools=False,
150151
)
151152

152153
def send_event(self, ev):

ansible_ai_connect/ai/api/model_pipelines/llamastack/tests/test_pipelines.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -344,6 +344,7 @@ def get_params(self) -> StreamingChatBotParameters:
344344
conversation_id=None,
345345
system_prompt="",
346346
media_type="application/json",
347+
no_tools=False,
347348
)
348349

349350
@patch("llama_stack_client.lib.agents.agent.AsyncAgent.create_session")

ansible_ai_connect/ai/api/model_pipelines/pipelines.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -232,6 +232,7 @@ class ChatBotParameters:
232232
conversation_id: Optional[str]
233233
system_prompt: str
234234
mcp_headers: Optional[dict[str, dict[str, str]]] = field(kw_only=True, default=None)
235+
no_tools: bool
235236

236237
@classmethod
237238
def init(
@@ -242,6 +243,7 @@ def init(
242243
conversation_id: Optional[str] = None,
243244
system_prompt: Optional[str] = None,
244245
mcp_headers: Optional[dict[str, dict[str, str]]] = None,
246+
no_tools: Optional[bool] = False,
245247
):
246248
return cls(
247249
query=query,
@@ -250,6 +252,7 @@ def init(
250252
conversation_id=conversation_id,
251253
system_prompt=system_prompt,
252254
mcp_headers=mcp_headers,
255+
no_tools=no_tools,
253256
)
254257

255258

@@ -270,6 +273,7 @@ def init(
270273
system_prompt: Optional[str] = None,
271274
media_type: Optional[str] = None,
272275
mcp_headers: Optional[dict[str, dict[str, str]]] = None,
276+
no_tools: Optional[bool] = False,
273277
):
274278
return cls(
275279
query=query,
@@ -279,6 +283,7 @@ def init(
279283
system_prompt=system_prompt,
280284
media_type=media_type,
281285
mcp_headers=mcp_headers,
286+
no_tools=no_tools,
282287
)
283288

284289

ansible_ai_connect/ai/api/serializers.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -345,6 +345,11 @@ class ChatRequestSerializer(serializers.Serializer):
345345
label="System prompt",
346346
help_text=("An optional non-default system prompt to be used on LLM (debug mode only)."),
347347
)
348+
no_tools = serializers.BooleanField(
349+
required=False,
350+
label="Bypass tools",
351+
help_text=("Whether to bypass all tools and MCP servers"),
352+
)
348353

349354

350355
class StreamingChatRequestSerializer(ChatRequestSerializer):

0 commit comments

Comments
 (0)