Skip to content

Commit 40ec258

Browse files
jackfranklin and Devtools-frontend LUCI CQ
authored and committed
AI: manually strip 5 backticks from LLM response in Insights agent
See the bug for context: we sometimes see the LLM wrap its response in 5 backticks for no apparent reason. We're going to sync with the Aida folks to see if we can understand why, or fix this on their side, but for now a fix is also to strip the 5 backticks if they are found in the response. Bug: 405054694 Change-Id: Ieb7499af142f7f7cba9ffea999729f49e1fbe536 Reviewed-on: https://chromium-review.googlesource.com/c/devtools/devtools-frontend/+/6375525 Auto-Submit: Jack Franklin <[email protected]> Reviewed-by: Ergün Erdoğmuş <[email protected]> Commit-Queue: Ergün Erdoğmuş <[email protected]>
1 parent 95461d3 commit 40ec258

File tree

2 files changed

+64
-1
lines changed

2 files changed

+64
-1
lines changed

front_end/models/ai_assistance/agents/PerformanceInsightsAgent.test.ts

Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,51 @@ describeWithEnvironment('PerformanceInsightsAgent', () => {
3737
assert.strictEqual(context.getTitle(), 'Insight: LCP by phase');
3838
});
3939

40+
// See b/405054694 for context on why we do this.
41+
describe('parsing text responses', () => {
42+
it('strips out 5 backticks if the response has them', async () => {
43+
const agent = new PerformanceInsightsAgent({aidaClient: mockAidaClient()});
44+
const response = agent.parseTextResponse('`````hello world`````');
45+
assert.deepEqual(response, {answer: 'hello world'});
46+
});
47+
48+
it('strips any newlines before the backticks', async () => {
49+
const agent = new PerformanceInsightsAgent({aidaClient: mockAidaClient()});
50+
const response = agent.parseTextResponse('\n\n`````hello world`````');
51+
assert.deepEqual(response, {answer: 'hello world'});
52+
});
53+
54+
it('does not strip the backticks if the response does not fully start and end with them', async () => {
55+
const agent = new PerformanceInsightsAgent({aidaClient: mockAidaClient()});
56+
const response = agent.parseTextResponse('answer: `````hello world`````');
57+
assert.deepEqual(response, {answer: 'answer: `````hello world`````'});
58+
});
59+
60+
it('does not strip the backticks in the middle of the response even if the response is also wrapped', async () => {
61+
const agent = new PerformanceInsightsAgent({aidaClient: mockAidaClient()});
62+
const response = agent.parseTextResponse('`````hello ````` world`````');
63+
assert.deepEqual(response, {answer: 'hello ````` world'});
64+
});
65+
66+
it('does not strip out inline code backticks', async () => {
67+
const agent = new PerformanceInsightsAgent({aidaClient: mockAidaClient()});
68+
const response = agent.parseTextResponse('This is code `console.log("hello")`');
69+
assert.deepEqual(response, {answer: 'This is code `console.log("hello")`'});
70+
});
71+
72+
it('does not strip out code block 3 backticks', async () => {
73+
const agent = new PerformanceInsightsAgent({aidaClient: mockAidaClient()});
74+
const response = agent.parseTextResponse(`\`\`\`
75+
code
76+
\`\`\``);
77+
assert.deepEqual(response, {
78+
answer: `\`\`\`
79+
code
80+
\`\`\``
81+
});
82+
});
83+
});
84+
4085
describe('handleContextDetails', () => {
4186
it('outputs the right context for the initial query from the user', async () => {
4287
const mockInsight = new TimelineUtils.InsightAIContext.ActiveInsight(FAKE_LCP_MODEL, FAKE_PARSED_TRACE);

front_end/models/ai_assistance/agents/PerformanceInsightsAgent.ts

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,10 @@ import {
1717
type ContextDetail,
1818
type ContextResponse,
1919
ConversationContext,
20+
type ParsedResponse,
2021
type RequestOptions,
2122
type ResponseData,
22-
ResponseType,
23+
ResponseType
2324
} from './AiAgent.js';
2425

2526
const UIStringsNotTranslated = {
@@ -266,6 +267,23 @@ The fields are:
266267
});
267268
}
268269

270+
override parseTextResponse(response: string): ParsedResponse {
271+
/**
272+
* Sometimes the LLM responds with code chunks that wrap a text based markdown response.
273+
* If this happens, we want to remove those before continuing.
274+
* See b/405054694 for more details.
275+
*/
276+
const trimmed = response.trim();
277+
const FIVE_BACKTICKS = '`````';
278+
if (trimmed.startsWith(FIVE_BACKTICKS) && trimmed.endsWith(FIVE_BACKTICKS)) {
279+
// Purposefully use the trimmed text here; we might as well remove any
280+
// newlines that are at the very start or end.
281+
const stripped = trimmed.slice(FIVE_BACKTICKS.length, -FIVE_BACKTICKS.length);
282+
return super.parseTextResponse(stripped);
283+
}
284+
return super.parseTextResponse(response);
285+
}
286+
269287
override async enhanceQuery(
270288
query: string,
271289
selectedInsight: ConversationContext<TimelineUtils.InsightAIContext.ActiveInsight>|null): Promise<string> {

0 commit comments

Comments
 (0)