
Commit c62bfff

When rendering the prompts, exclude turns from the history that errored due to prompt filtration (#399)
* When rendering the prompts, exclude turns from the history that errored due to prompt filtration. This was resulting in users getting stuck in a state where all their subsequent requests were being filtered.
* Differentiate between response-filtered and prompt-filtered results (response-filtered requests should be rendered in the message history, whereas prompt-filtered requests should not).
1 parent ff93334 commit c62bfff
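
At its core, the fix is a one-line filter on the turn history before it is handed to the prompt renderer. The TypeScript sketch below restates that filter on its own: Turn, TurnStatus and responseStatus come from the conversation.ts diff further down, while the helper name historyForPrompt and the import path are hypothetical, since the actual change inlines the filter at its two call sites (toolCallingLoop.ts and conversationHistory.tsx).

// Minimal sketch only; Turn, TurnStatus and responseStatus are from the diff below,
// while this helper and its import path are hypothetical.
import { Turn, TurnStatus } from './src/extension/prompt/common/conversation';

function historyForPrompt(turns: readonly Turn[]): Turn[] {
	// A prompt-filtered turn contains the very input that tripped the filter, so replaying
	// it in later prompts caused every subsequent request to be filtered as well: drop it.
	// Response-filtered turns (TurnStatus.Filtered) are kept; only the model's reply was blocked.
	return turns.filter(turn => turn.responseStatus !== TurnStatus.PromptFiltered);
}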

9 files changed: +41 -5 lines


src/extension/intents/node/toolCallingLoop.ts

Lines changed: 3 additions & 1 deletion
@@ -131,11 +131,13 @@ export abstract class ToolCallingLoop<TOptions extends IToolCallingLoopOptions =
 		const query = isContinuation ?
 			'Please continue' :
 			this.turn.request.message;
+		// exclude turns from the history that errored due to prompt filtration
+		const history = this.options.conversation.turns.slice(0, -1).filter(turn => turn.responseStatus !== TurnStatus.PromptFiltered);
 
 		return {
 			requestId: this.turn.id,
 			query,
-			history: this.options.conversation.turns.slice(0, -1),
+			history,
 			toolCallResults: this.toolCallResults,
 			toolCallRounds: this.toolCallRounds,
 			editedFileEvents: this.options.request.editedFileEvents,

src/extension/prompt/common/conversation.ts

Lines changed: 3 additions & 0 deletions
@@ -5,6 +5,7 @@
 
 import { PromptReference, Raw } from '@vscode/prompt-tsx';
 import type { ChatRequestEditedFileEvent, ChatResponseStream, ChatResult, LanguageModelToolResult } from 'vscode';
+import { FilterReason } from '../../../platform/networking/common/openai';
 import { isLocation, toLocation } from '../../../util/common/types';
 import { ResourceMap } from '../../../util/vs/base/common/map';
 import { assertType } from '../../../util/vs/base/common/types';
@@ -22,6 +23,7 @@ export enum TurnStatus {
 	Cancelled = 'cancelled',
 	OffTopic = 'off-topic',
 	Filtered = 'filtered',
+	PromptFiltered = 'prompt-filtered',
 	Error = 'error',
 }
 
@@ -315,6 +317,7 @@ export interface IResultMetadata {
 	renderedUserMessage?: Raw.ChatCompletionContentPart[];
 	renderedGlobalContext?: Raw.ChatCompletionContentPart[];
 	command?: string;
+	filterCategory?: FilterReason;
 
 	/**
 	 * All code blocks that were in the response

src/extension/prompt/node/chatMLFetcher.ts

Lines changed: 1 addition & 1 deletion
@@ -499,7 +499,7 @@ export class ChatMLFetcherImpl extends AbstractChatMLFetcher {
 			return { type: ChatFetchResponseType.Failed, reason, requestId, serverRequestId };
 		}
 		if (response.failKind === ChatFailKind.ContentFilter) {
-			return { type: ChatFetchResponseType.Filtered, reason, category: FilterReason.Prompt, requestId, serverRequestId };
+			return { type: ChatFetchResponseType.PromptFiltered, reason, category: FilterReason.Prompt, requestId, serverRequestId };
 		}
 		if (response.failKind === ChatFailKind.AgentUnauthorized) {
 			return { type: ChatFetchResponseType.AgentUnauthorized, reason, authorizationUrl: response.data!.authorize_url, requestId, serverRequestId };

src/extension/prompt/node/chatParticipantRequestHandler.ts

Lines changed: 6 additions & 1 deletion
@@ -11,6 +11,7 @@ import { CanceledMessage, ChatLocation } from '../../../platform/chat/common/com
 import { IEndpointProvider } from '../../../platform/endpoint/common/endpointProvider';
 import { IIgnoreService } from '../../../platform/ignore/common/ignoreService';
 import { ILogService } from '../../../platform/log/common/logService';
+import { FilterReason } from '../../../platform/networking/common/openai';
 import { ITabsAndEditorsService } from '../../../platform/tabs/common/tabsAndEditorsService';
 import { getWorkspaceFileDisplayPath, IWorkspaceService } from '../../../platform/workspace/common/workspaceService';
 import { ChatResponseStreamImpl } from '../../../util/common/chatResponseStreamImpl';
@@ -413,7 +414,11 @@ function createTurnFromVSCodeChatHistoryTurns(
 	if (!chatResponseTurn.result.errorDetails) {
 		status = TurnStatus.Success;
 	} else if (chatResponseTurn.result.errorDetails?.responseIsFiltered) {
-		status = TurnStatus.Filtered;
+		if (chatResponseTurn.result.metadata?.category === FilterReason.Prompt) {
+			status = TurnStatus.PromptFiltered;
+		} else {
+			status = TurnStatus.Filtered;
+		}
 	} else if (chatResponseTurn.result.errorDetails.message === 'Cancelled' || chatResponseTurn.result.errorDetails.message === CanceledMessage.message) {
 		status = TurnStatus.Cancelled;
 	} else {

src/extension/prompt/node/defaultIntentRequestHandler.ts

Lines changed: 8 additions & 1 deletion
@@ -16,6 +16,7 @@ import { IEndpointProvider } from '../../../platform/endpoint/common/endpointPro
 import { HAS_IGNORED_FILES_MESSAGE } from '../../../platform/ignore/common/ignoreService';
 import { ILogService } from '../../../platform/log/common/logService';
 import { FinishedCallback, IResponseDelta, OptionalChatRequestParams } from '../../../platform/networking/common/fetch';
+import { FilterReason } from '../../../platform/networking/common/openai';
 import { IRequestLogger } from '../../../platform/requestLogger/node/requestLogger';
 import { ISurveyService } from '../../../platform/survey/common/surveyService';
 import { IExperimentationService } from '../../../platform/telemetry/common/nullExperimentationService';
@@ -432,10 +433,16 @@ export class DefaultIntentRequestHandler {
 			}
 			case ChatFetchResponseType.Filtered: {
 				const errorDetails = getErrorDetailsFromChatFetchError(fetchResult, (await this._authenticationService.getCopilotToken()).copilotPlan);
-				const chatResult = { errorDetails, metadata: metadataFragment };
+				const chatResult = { errorDetails, metadata: { ...metadataFragment, filterReason: fetchResult.category } };
 				this.turn.setResponse(TurnStatus.Filtered, undefined, baseModelTelemetry.properties.messageId, chatResult);
 				return chatResult;
 			}
+			case ChatFetchResponseType.PromptFiltered: {
+				const errorDetails = getErrorDetailsFromChatFetchError(fetchResult, (await this._authenticationService.getCopilotToken()).copilotPlan);
+				const chatResult = { errorDetails, metadata: { ...metadataFragment, filterReason: FilterReason.Prompt } };
+				this.turn.setResponse(TurnStatus.PromptFiltered, undefined, baseModelTelemetry.properties.messageId, chatResult);
+				return chatResult;
+			}
 			case ChatFetchResponseType.AgentUnauthorized: {
 				const chatResult = {};
 				this.turn.setResponse(TurnStatus.Error, undefined, baseModelTelemetry.properties.messageId, chatResult);

src/extension/prompts/node/panel/conversationHistory.tsx

Lines changed: 2 additions & 1 deletion
@@ -72,7 +72,8 @@ export class HistoryWithInstructions extends PromptElement<Omit<ConversationHist
  */
 export class ConversationHistory extends PromptElement<ConversationHistoryProps> {
 	override render(_state: void, _sizing: PromptSizing): PromptPiece<any, any> | undefined {
-		let turnHistory = this.props.history;
+		// exclude turns from the history that errored due to prompt filtration
+		let turnHistory = this.props.history.filter(turn => turn.responseStatus !== TurnStatus.PromptFiltered);
 
 		if (this.props.inline && turnHistory.length > 0) {
 			const historyMessage = `The current code is a result of a previous interaction with you. Here are my previous messages: \n- ${turnHistory.map(r => r.request.message).join('\n- ')}`;

src/extension/xtab/node/xtabProvider.ts

Lines changed: 1 addition & 0 deletions
@@ -774,6 +774,7 @@ export class XtabProvider extends ChainedStatelessNextEditProvider {
 				return new NoNextEditReason.GotCancelled('afterFetchCall');
 			case ChatFetchResponseType.OffTopic:
 			case ChatFetchResponseType.Filtered:
+			case ChatFetchResponseType.PromptFiltered:
 			case ChatFetchResponseType.Length:
 			case ChatFetchResponseType.RateLimited:
 			case ChatFetchResponseType.QuotaExceeded:

src/platform/chat/common/commonTypes.ts

Lines changed: 16 additions & 0 deletions
@@ -87,6 +87,7 @@ export enum ChatFetchResponseType {
 	OffTopic = 'offTopic',
 	Canceled = 'canceled',
 	Filtered = 'filtered',
+	PromptFiltered = 'promptFiltered',
 	Length = 'length',
 	RateLimited = 'rateLimited',
 	QuotaExceeded = 'quotaExceeded',
@@ -123,6 +124,10 @@ export type ChatFetchError =
 	 * We requested conversation, but the response was filtered by RAI.
 	 */
 	| { type: ChatFetchResponseType.Filtered; reason: string; category: FilterReason; requestId: string; serverRequestId: string | undefined }
+	/**
+	 * We requested conversation, but the prompt was filtered by RAI.
+	 */
+	| { type: ChatFetchResponseType.PromptFiltered; reason: string; category: FilterReason; requestId: string; serverRequestId: string | undefined }
 	/**
 	 * We requested conversation, but the response was too long.
 	 */
@@ -257,6 +262,7 @@ export function getErrorDetailsFromChatFetchError(fetchResult: ChatFetchError, c
 		case ChatFetchResponseType.Failed:
 			return { message: l10n.t(`Sorry, your request failed. Please try again. Request id: {0}\n\nReason: {1}`, fetchResult.requestId, fetchResult.reason) };
 		case ChatFetchResponseType.Filtered:
+		case ChatFetchResponseType.PromptFiltered:
 			return {
 				message: getFilteredMessage(fetchResult.category),
 				responseIsFiltered: true,
@@ -289,6 +295,16 @@ export function getFilteredMessage(category: FilterReason, supportsMarkdown: boo
 			} else {
 				return l10n.t(`Sorry, the response matched public code so it was blocked. Please rephrase your prompt.`);
 			}
+		case FilterReason.Prompt:
+			if (supportsMarkdown) {
+				return l10n.t({
+					message:
+						`Sorry, your prompt was filtered by the Responsible AI Service. Please rephrase your prompt and try again. [Learn more](https://aka.ms/copilot-chat-filtered-docs).`,
+					comment: ["{Locked='](https://aka.ms/copilot-chat-filtered-docs)'}"]
+				});
+			} else {
+				return l10n.t(`Sorry, your prompt was filtered by the Responsible AI Service. Please rephrase your prompt and try again.`);
+			}
 		default:
 			if (supportsMarkdown) {
 				return l10n.t({
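
To illustrate how the two filter outcomes diverge downstream, here is a hedged sketch of a caller mapping a ChatFetchError to a TurnStatus. It mirrors the handling in defaultIntentRequestHandler.ts above; the helper name turnStatusForFetchError is hypothetical and the import paths are only illustrative of where the real declarations live.

// Illustrative sketch; ChatFetchError and ChatFetchResponseType are declared in
// src/platform/chat/common/commonTypes.ts, TurnStatus in src/extension/prompt/common/conversation.ts.
// The helper name is hypothetical; the real mapping lives in defaultIntentRequestHandler.ts.
import { ChatFetchError, ChatFetchResponseType } from './src/platform/chat/common/commonTypes';
import { TurnStatus } from './src/extension/prompt/common/conversation';

function turnStatusForFetchError(result: ChatFetchError): TurnStatus | undefined {
	switch (result.type) {
		case ChatFetchResponseType.PromptFiltered:
			// The user's prompt was blocked; turns with this status are excluded from rendered history.
			return TurnStatus.PromptFiltered;
		case ChatFetchResponseType.Filtered:
			// Only the response was blocked; the turn still appears in the message history.
			return TurnStatus.Filtered;
		default:
			// Other failure kinds (cancelled, off-topic, length, quota, ...) are handled elsewhere.
			return undefined;
	}
}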

test/base/cachingChatMLFetcher.ts

Lines changed: 1 addition & 0 deletions
@@ -239,6 +239,7 @@ export class CachingChatMLFetcher extends AbstractChatMLFetcher implements IDisp
 		if (
 			result.type === ChatFetchResponseType.OffTopic
 			|| result.type === ChatFetchResponseType.Filtered
+			|| result.type === ChatFetchResponseType.PromptFiltered
 			|| result.type === ChatFetchResponseType.Length
 			|| result.type === ChatFetchResponseType.Success
 		) {

0 commit comments
