Skip to content

Commit 6d7150f

Browse files
authored
chore(participant): Wire up telemetry for user prompts VSCODE-606 (#836)
* Wire up telemetry for user prompts
* Remove .only
* Clean up rebase artifacts
* Remove .only
* Address CR feedback
* history_length -> history_size
1 parent 60021d6 commit 6d7150f

File tree

9 files changed

+330
-60
lines changed

9 files changed

+330
-60
lines changed

src/participant/participant.ts

Lines changed: 26 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@ import {
3939
} from '../telemetry/telemetryService';
4040
import { DocsChatbotAIService } from './docsChatbotAIService';
4141
import type TelemetryService from '../telemetry/telemetryService';
42+
import type { ModelInput } from './prompts/promptBase';
4243
import { processStreamWithIdentifiers } from './streamParsing';
4344
import type { PromptIntent } from './prompts/intent';
4445

@@ -164,10 +165,10 @@ export default class ParticipantController {
164165
}
165166

166167
async _getChatResponse({
167-
messages,
168+
modelInput,
168169
token,
169170
}: {
170-
messages: vscode.LanguageModelChatMessage[];
171+
modelInput: ModelInput;
171172
token: vscode.CancellationToken;
172173
}): Promise<vscode.LanguageModelChatResponse> {
173174
const model = await getCopilotModel();
@@ -176,20 +177,22 @@ export default class ParticipantController {
176177
throw new Error('Copilot model not found');
177178
}
178179

179-
return await model.sendRequest(messages, {}, token);
180+
this._telemetryService.trackCopilotParticipantPrompt(modelInput.stats);
181+
182+
return await model.sendRequest(modelInput.messages, {}, token);
180183
}
181184

182185
async streamChatResponse({
183-
messages,
186+
modelInput,
184187
stream,
185188
token,
186189
}: {
187-
messages: vscode.LanguageModelChatMessage[];
190+
modelInput: ModelInput;
188191
stream: vscode.ChatResponseStream;
189192
token: vscode.CancellationToken;
190193
}): Promise<void> {
191194
const chatResponse = await this._getChatResponse({
192-
messages,
195+
modelInput,
193196
token,
194197
});
195198
for await (const fragment of chatResponse.text) {
@@ -226,16 +229,16 @@ export default class ParticipantController {
226229
}
227230

228231
async streamChatResponseContentWithCodeActions({
229-
messages,
232+
modelInput,
230233
stream,
231234
token,
232235
}: {
233-
messages: vscode.LanguageModelChatMessage[];
236+
modelInput: ModelInput;
234237
stream: vscode.ChatResponseStream;
235238
token: vscode.CancellationToken;
236239
}): Promise<void> {
237240
const chatResponse = await this._getChatResponse({
238-
messages,
241+
modelInput,
239242
token,
240243
});
241244

@@ -254,15 +257,15 @@ export default class ParticipantController {
254257
// This will stream all of the response content and create a string from it.
255258
// It should only be used when the entire response is needed at one time.
256259
async getChatResponseContent({
257-
messages,
260+
modelInput,
258261
token,
259262
}: {
260-
messages: vscode.LanguageModelChatMessage[];
263+
modelInput: ModelInput;
261264
token: vscode.CancellationToken;
262265
}): Promise<string> {
263266
let responseContent = '';
264267
const chatResponse = await this._getChatResponse({
265-
messages,
268+
modelInput,
266269
token,
267270
});
268271
for await (const fragment of chatResponse.text) {
@@ -278,14 +281,14 @@ export default class ParticipantController {
278281
stream: vscode.ChatResponseStream,
279282
token: vscode.CancellationToken
280283
): Promise<ChatResult> {
281-
const messages = await Prompts.generic.buildMessages({
284+
const modelInput = await Prompts.generic.buildMessages({
282285
request,
283286
context,
284287
connectionNames: this._getConnectionNames(),
285288
});
286289

287290
await this.streamChatResponseContentWithCodeActions({
288-
messages,
291+
modelInput,
289292
token,
290293
stream,
291294
});
@@ -334,14 +337,14 @@ export default class ParticipantController {
334337
request: vscode.ChatRequest;
335338
token: vscode.CancellationToken;
336339
}): Promise<PromptIntent> {
337-
const messages = await Prompts.intent.buildMessages({
340+
const modelInput = await Prompts.intent.buildMessages({
338341
connectionNames: this._getConnectionNames(),
339342
request,
340343
context,
341344
});
342345

343346
const responseContent = await this.getChatResponseContent({
344-
messages,
347+
modelInput,
345348
token,
346349
});
347350

@@ -708,7 +711,7 @@ export default class ParticipantController {
708711
connectionNames: this._getConnectionNames(),
709712
});
710713
const responseContentWithNamespace = await this.getChatResponseContent({
711-
messages: messagesWithNamespace,
714+
modelInput: messagesWithNamespace,
712715
token,
713716
});
714717
const { databaseName, collectionName } =
@@ -1043,7 +1046,7 @@ export default class ParticipantController {
10431046
return schemaRequestChatResult(context.history);
10441047
}
10451048

1046-
const messages = await Prompts.schema.buildMessages({
1049+
const modelInput = await Prompts.schema.buildMessages({
10471050
request,
10481051
context,
10491052
databaseName,
@@ -1054,7 +1057,7 @@ export default class ParticipantController {
10541057
...(sampleDocuments ? { sampleDocuments } : {}),
10551058
});
10561059
await this.streamChatResponse({
1057-
messages,
1060+
modelInput,
10581061
stream,
10591062
token,
10601063
});
@@ -1147,7 +1150,7 @@ export default class ParticipantController {
11471150
);
11481151
}
11491152

1150-
const messages = await Prompts.query.buildMessages({
1153+
const modelInput = await Prompts.query.buildMessages({
11511154
request,
11521155
context,
11531156
databaseName,
@@ -1158,7 +1161,7 @@ export default class ParticipantController {
11581161
});
11591162

11601163
await this.streamChatResponseContentWithCodeActions({
1161-
messages,
1164+
modelInput,
11621165
stream,
11631166
token,
11641167
});
@@ -1230,14 +1233,14 @@ export default class ParticipantController {
12301233
]
12311234
): Promise<void> {
12321235
const [request, context, stream, token] = args;
1233-
const messages = await Prompts.generic.buildMessages({
1236+
const modelInput = await Prompts.generic.buildMessages({
12341237
request,
12351238
context,
12361239
connectionNames: this._getConnectionNames(),
12371240
});
12381241

12391242
await this.streamChatResponseContentWithCodeActions({
1240-
messages,
1243+
modelInput,
12411244
stream,
12421245
token,
12431246
});

src/participant/prompts/intent.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import type { InternalPromptPurpose } from '../../telemetry/telemetryService';
12
import type { PromptArgsBase } from './promptBase';
23
import { PromptBase } from './promptBase';
34

@@ -47,4 +48,8 @@ Docs`;
4748
return 'Default';
4849
}
4950
}
51+
52+
protected get internalPurposeForTelemetry(): InternalPromptPurpose {
53+
return 'intent';
54+
}
5055
}

src/participant/prompts/namespace.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import type { InternalPromptPurpose } from '../../telemetry/telemetryService';
12
import type { PromptArgsBase } from './promptBase';
23
import { PromptBase } from './promptBase';
34

@@ -50,4 +51,8 @@ No names found.
5051
const collectionName = text.match(COL_NAME_REGEX)?.[1].trim();
5152
return { databaseName, collectionName };
5253
}
54+
55+
protected get internalPurposeForTelemetry(): InternalPromptPurpose {
56+
return 'namespace';
57+
}
5358
}

src/participant/prompts/promptBase.ts

Lines changed: 50 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,9 @@
11
import * as vscode from 'vscode';
22
import type { ChatResult, ParticipantResponseType } from '../constants';
3+
import type {
4+
InternalPromptPurpose,
5+
ParticipantPromptProperties,
6+
} from '../../telemetry/telemetryService';
37

48
export interface PromptArgsBase {
59
request: {
@@ -10,14 +14,31 @@ export interface PromptArgsBase {
1014
connectionNames: string[];
1115
}
1216

17+
export interface UserPromptResponse {
18+
prompt: string;
19+
hasSampleDocs: boolean;
20+
}
21+
22+
export interface ModelInput {
23+
messages: vscode.LanguageModelChatMessage[];
24+
stats: ParticipantPromptProperties;
25+
}
26+
1327
export abstract class PromptBase<TArgs extends PromptArgsBase> {
1428
protected abstract getAssistantPrompt(args: TArgs): string;
1529

16-
protected getUserPrompt(args: TArgs): Promise<string> {
17-
return Promise.resolve(args.request.prompt);
30+
protected get internalPurposeForTelemetry(): InternalPromptPurpose {
31+
return undefined;
1832
}
1933

20-
async buildMessages(args: TArgs): Promise<vscode.LanguageModelChatMessage[]> {
34+
protected getUserPrompt(args: TArgs): Promise<UserPromptResponse> {
35+
return Promise.resolve({
36+
prompt: args.request.prompt,
37+
hasSampleDocs: false,
38+
});
39+
}
40+
41+
async buildMessages(args: TArgs): Promise<ModelInput> {
2142
let historyMessages = this.getHistoryMessages(args);
2243
// If the current user's prompt is a connection name, and the last
2344
// message was to connect. We want to use the last
@@ -49,13 +70,37 @@ export abstract class PromptBase<TArgs extends PromptArgsBase> {
4970
}
5071
}
5172

52-
return [
73+
const { prompt, hasSampleDocs } = await this.getUserPrompt(args);
74+
const messages = [
5375
// eslint-disable-next-line new-cap
5476
vscode.LanguageModelChatMessage.Assistant(this.getAssistantPrompt(args)),
5577
...historyMessages,
5678
// eslint-disable-next-line new-cap
57-
vscode.LanguageModelChatMessage.User(await this.getUserPrompt(args)),
79+
vscode.LanguageModelChatMessage.User(prompt),
5880
];
81+
82+
return {
83+
messages,
84+
stats: this.getStats(messages, args, hasSampleDocs),
85+
};
86+
}
87+
88+
protected getStats(
89+
messages: vscode.LanguageModelChatMessage[],
90+
{ request, context }: TArgs,
91+
hasSampleDocs: boolean
92+
): ParticipantPromptProperties {
93+
return {
94+
total_message_length: messages.reduce(
95+
(acc, message) => acc + message.content.length,
96+
0
97+
),
98+
user_input_length: request.prompt.length,
99+
has_sample_documents: hasSampleDocs,
100+
command: request.command || 'generic',
101+
history_size: context.history.length,
102+
internal_purpose: this.internalPurposeForTelemetry,
103+
};
59104
}
60105

61106
// When passing the history to the model we only want contextual messages

src/participant/prompts/query.ts

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,8 @@ import * as vscode from 'vscode';
22
import type { Document } from 'bson';
33

44
import { getStringifiedSampleDocuments } from '../sampleDocuments';
5+
import type { PromptArgsBase, UserPromptResponse } from './promptBase';
56
import { codeBlockIdentifier } from '../constants';
6-
import type { PromptArgsBase } from './promptBase';
77
import { PromptBase } from './promptBase';
88

99
interface QueryPromptArgs extends PromptArgsBase {
@@ -59,21 +59,23 @@ db.getCollection('');\n`;
5959
request,
6060
schema,
6161
sampleDocuments,
62-
}: QueryPromptArgs): Promise<string> {
62+
}: QueryPromptArgs): Promise<UserPromptResponse> {
6363
let prompt = request.prompt;
6464
prompt += `\nDatabase name: ${databaseName}\n`;
6565
prompt += `Collection name: ${collectionName}\n`;
6666
if (schema) {
6767
prompt += `Collection schema: ${schema}\n`;
6868
}
69-
if (sampleDocuments) {
70-
prompt += await getStringifiedSampleDocuments({
71-
sampleDocuments,
72-
prompt,
73-
});
74-
}
7569

76-
return prompt;
70+
const sampleDocumentsPrompt = await getStringifiedSampleDocuments({
71+
sampleDocuments,
72+
prompt,
73+
});
74+
75+
return {
76+
prompt: `${prompt}${sampleDocumentsPrompt}`,
77+
hasSampleDocs: !!sampleDocumentsPrompt,
78+
};
7779
}
7880

7981
get emptyRequestResponse(): string {

src/participant/prompts/schema.ts

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import type { UserPromptResponse } from './promptBase';
12
import { PromptBase, type PromptArgsBase } from './promptBase';
23

34
export const DOCUMENTS_TO_SAMPLE_FOR_SCHEMA_PROMPT = 100;
@@ -11,7 +12,6 @@ export interface SchemaPromptArgs extends PromptArgsBase {
1112
collectionName: string;
1213
schema: string;
1314
amountOfDocumentsSampled: number;
14-
connectionNames: string[];
1515
}
1616

1717
export class SchemaPrompt extends PromptBase<SchemaPromptArgs> {
@@ -30,13 +30,16 @@ Amount of documents sampled: ${amountOfDocumentsSampled}.`;
3030
collectionName,
3131
request,
3232
schema,
33-
}: SchemaPromptArgs): Promise<string> {
33+
}: SchemaPromptArgs): Promise<UserPromptResponse> {
3434
const prompt = request.prompt;
35-
return Promise.resolve(`${
36-
prompt ? `The user provided additional information: "${prompt}"\n` : ''
37-
}Database name: ${databaseName}
35+
return Promise.resolve({
36+
prompt: `${
37+
prompt ? `The user provided additional information: "${prompt}"\n` : ''
38+
}Database name: ${databaseName}
3839
Collection name: ${collectionName}
3940
Schema:
40-
${schema}`);
41+
${schema}`,
42+
hasSampleDocs: false,
43+
});
4144
}
4245
}

0 commit comments

Comments (0)