Commit 315a8d1

Merge branch 'main' into tyriar/autoapprove_merge__regex_flags

2 parents: 129ab8a + cc1d86c

21 files changed: +717, -525 lines

extensions/vscode-api-tests/src/singlefolder-tests/chat.test.ts

Lines changed: 15 additions & 12 deletions
@@ -15,22 +15,25 @@ suite('chat', () => {
 		disposables = [];
 
 		// Register a dummy default model which is required for a participant request to go through
-		disposables.push(lm.registerChatModelProvider('test-lm', {
-			async provideLanguageModelResponse(_messages, _options, _extensionId, _progress, _token) {
+		disposables.push(lm.registerChatModelProvider('test-lm-vendor', {
+			async prepareLanguageModelChat(_options, _token) {
+				return [{
+					id: 'test-lm',
+					name: 'test-lm',
+					family: 'test',
+					version: '1.0.0',
+					maxInputTokens: 100,
+					maxOutputTokens: 100,
+					isDefault: true,
+					isUserSelectable: true
+				}];
+			},
+			async provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
 				return undefined;
 			},
-			async provideTokenCount(_text, _token) {
+			async provideTokenCount(_model, _text, _token) {
 				return 1;
 			},
-		}, {
-			name: 'test-lm',
-			version: '1.0.0',
-			family: 'test',
-			vendor: 'test-lm-vendor',
-			maxInputTokens: 100,
-			maxOutputTokens: 100,
-			isDefault: true,
-			isUserSelectable: true
 		}));
 	});
 
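For orientation, here is the same registration written as a standalone extension snippet. This is a minimal sketch assuming the proposed `chatProvider` API exactly as exercised by the test above; the vendor string and model information mirror the test fixture and are illustrative rather than canonical.

import * as vscode from 'vscode';

// Minimal sketch, assuming the proposed `chatProvider` API as exercised in the test above.
// The vendor id and model information mirror the test fixture; a real provider would
// stream fragments through `progress` instead of returning undefined.
export function activate(context: vscode.ExtensionContext) {
	context.subscriptions.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
		// Model metadata now comes back from the prepare call instead of a separate metadata argument.
		async prepareLanguageModelChat(_options, _token) {
			return [{
				id: 'test-lm',
				name: 'test-lm',
				family: 'test',
				version: '1.0.0',
				maxInputTokens: 100,
				maxOutputTokens: 100,
				isDefault: true,
				isUserSelectable: true
			}];
		},
		// The resolved model is passed explicitly to every request.
		async provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
			return undefined;
		},
		async provideTokenCount(_model, _text, _token) {
			return 1;
		},
	}));
}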

extensions/vscode-api-tests/src/singlefolder-tests/lm.test.ts

Lines changed: 47 additions & 27 deletions
@@ -13,11 +13,11 @@ suite('lm', function () {
 
 	let disposables: vscode.Disposable[] = [];
 
-	const testProviderOptions: vscode.ChatResponseProviderMetadata = {
+	const testProviderOptions: vscode.LanguageModelChatInformation = {
+		id: 'test-lm',
 		name: 'test-lm',
 		version: '1.0.0',
 		family: 'test',
-		vendor: 'test-lm-vendor',
 		maxInputTokens: 100,
 		maxOutputTokens: 100,
 	};
@@ -38,15 +38,23 @@ suite('lm', function () {
 		let p: vscode.Progress<vscode.ChatResponseFragment2> | undefined;
 		const defer = new DeferredPromise<void>();
 
-		disposables.push(vscode.lm.registerChatModelProvider('test-lm', {
-			async provideLanguageModelResponse(_messages, _options, _extensionId, progress, _token) {
-				p = progress;
-				return defer.p;
-			},
-			async provideTokenCount(_text, _token) {
-				return 1;
-			},
-		}, testProviderOptions));
+		try {
+			disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
+				async prepareLanguageModelChat(_options, _token) {
+					return [testProviderOptions];
+				},
+				async provideLanguageModelChatResponse(_model, _messages, _options, progress, _token) {
+					p = progress;
+					return defer.p;
+				},
+				async provideTokenCount(_model, _text, _token) {
+					return 1;
+				},
+			}));
+		} catch (e) {
+			assert.fail(`Failed to register chat model provider: ${e}`);
+		}
+
 
 		const models = await vscode.lm.selectChatModels({ id: 'test-lm' });
 		assert.strictEqual(models.length, 1);
@@ -83,14 +91,17 @@ suite('lm', function () {
 
 	test('lm request fail', async function () {
 
-		disposables.push(vscode.lm.registerChatModelProvider('test-lm', {
-			async provideLanguageModelResponse(_messages, _options, _extensionId, _progress, _token) {
+		disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
+			async prepareLanguageModelChat(_options, _token) {
+				return [testProviderOptions];
+			},
+			async provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
 				throw new Error('BAD');
 			},
-			async provideTokenCount(_text, _token) {
+			async provideTokenCount(_model, _text, _token) {
 				return 1;
 			},
-		}, testProviderOptions));
+		}));
 
 		const models = await vscode.lm.selectChatModels({ id: 'test-lm' });
 		assert.strictEqual(models.length, 1);
@@ -107,14 +118,17 @@ suite('lm', function () {
 
 		const defer = new DeferredPromise<void>();
 
-		disposables.push(vscode.lm.registerChatModelProvider('test-lm', {
-			async provideLanguageModelResponse(_messages, _options, _extensionId, _progress, _token) {
+		disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
+			async prepareLanguageModelChat(_options, _token) {
+				return [testProviderOptions];
+			},
+			async provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
 				return defer.p;
 			},
-			async provideTokenCount(_text, _token) {
+			async provideTokenCount(_model, _text, _token) {
 				return 1;
 			}
-		}, testProviderOptions));
+		}));
 
 		const models = await vscode.lm.selectChatModels({ id: 'test-lm' });
 		assert.strictEqual(models.length, 1);
@@ -142,14 +156,17 @@ suite('lm', function () {
 
 	test('LanguageModelError instance is not thrown to extensions#235322 (SYNC)', async function () {
 
-		disposables.push(vscode.lm.registerChatModelProvider('test-lm', {
-			provideLanguageModelResponse(_messages, _options, _extensionId, _progress, _token) {
+		disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
+			async prepareLanguageModelChat(_options, _token) {
+				return [testProviderOptions];
+			},
+			provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
 				throw vscode.LanguageModelError.Blocked('You have been blocked SYNC');
 			},
-			async provideTokenCount(_text, _token) {
+			async provideTokenCount(_model, _text, _token) {
 				return 1;
 			}
-		}, testProviderOptions));
+		}));
 
 		const models = await vscode.lm.selectChatModels({ id: 'test-lm' });
 		assert.strictEqual(models.length, 1);
@@ -165,14 +182,17 @@ suite('lm', function () {
 
 	test('LanguageModelError instance is not thrown to extensions#235322 (ASYNC)', async function () {
 
-		disposables.push(vscode.lm.registerChatModelProvider('test-lm', {
-			async provideLanguageModelResponse(_messages, _options, _extensionId, _progress, _token) {
+		disposables.push(vscode.lm.registerChatModelProvider('test-lm-vendor', {
+			async prepareLanguageModelChat(_options, _token) {
+				return [testProviderOptions];
+			},
+			async provideLanguageModelChatResponse(_model, _messages, _options, _progress, _token) {
 				throw vscode.LanguageModelError.Blocked('You have been blocked ASYNC');
 			},
-			async provideTokenCount(_text, _token) {
+			async provideTokenCount(_model, _text, _token) {
 				return 1;
 			}
-		}, testProviderOptions));
+		}));
 
 		const models = await vscode.lm.selectChatModels({ id: 'test-lm' });
 		assert.strictEqual(models.length, 1);
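The consumer side is unchanged by this commit; for contrast, here is a minimal sketch of requesting the test model through the stable `vscode.lm` API, assuming the model registered above is available. The prompt and error handling are illustrative only.

import * as vscode from 'vscode';

// Minimal consumer-side sketch using the stable LanguageModelChat API.
// Selection by id works as before; only the provider-side registration moved to a
// vendor-scoped model in this commit.
async function askTestModel(token: vscode.CancellationToken): Promise<string> {
	const [model] = await vscode.lm.selectChatModels({ id: 'test-lm' });
	if (!model) {
		throw new Error('test-lm is not available');
	}
	const response = await model.sendRequest(
		[vscode.LanguageModelChatMessage.User('Hello')],
		{},
		token
	);
	let text = '';
	for await (const chunk of response.text) {
		text += chunk;
	}
	return text;
}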

src/vs/platform/actions/common/actions.ts

Lines changed: 0 additions & 1 deletion
@@ -235,7 +235,6 @@ export class MenuId {
 	static readonly ChatExecuteSecondary = new MenuId('ChatExecuteSecondary');
 	static readonly ChatInput = new MenuId('ChatInput');
 	static readonly ChatInputSide = new MenuId('ChatInputSide');
-	static readonly ChatModelPicker = new MenuId('ChatModelPicker');
 	static readonly ChatModePicker = new MenuId('ChatModePicker');
 	static readonly ChatEditingWidgetToolbar = new MenuId('ChatEditingWidgetToolbar');
 	static readonly ChatEditingEditorContent = new MenuId('ChatEditingEditorContent');

src/vs/workbench/api/browser/mainThreadLanguageModels.ts

Lines changed: 31 additions & 27 deletions
@@ -16,8 +16,7 @@ import { ExtensionIdentifier } from '../../../platform/extensions/common/extensi
 import { ILogService } from '../../../platform/log/common/log.js';
 import { resizeImage } from '../../contrib/chat/browser/imageUtils.js';
 import { ILanguageModelIgnoredFilesService } from '../../contrib/chat/common/ignoredFiles.js';
-import { ILanguageModelStatsService } from '../../contrib/chat/common/languageModelStats.js';
-import { IChatMessage, IChatResponseFragment, ILanguageModelChatMetadata, ILanguageModelChatResponse, ILanguageModelChatSelector, ILanguageModelsService } from '../../contrib/chat/common/languageModels.js';
+import { IChatMessage, IChatResponseFragment, ILanguageModelChatResponse, ILanguageModelChatSelector, ILanguageModelsService } from '../../contrib/chat/common/languageModels.js';
 import { IAuthenticationAccessService } from '../../services/authentication/browser/authenticationAccessService.js';
 import { AuthenticationSession, AuthenticationSessionsChangeEvent, IAuthenticationProvider, IAuthenticationService, INTERNAL_AUTH_PROVIDER_PREFIX } from '../../services/authentication/common/authentication.js';
 import { IExtHostContext, extHostNamedCustomer } from '../../services/extensions/common/extHostCustomers.js';
@@ -31,36 +30,44 @@ export class MainThreadLanguageModels implements MainThreadLanguageModelsShape {
 
 	private readonly _proxy: ExtHostLanguageModelsShape;
 	private readonly _store = new DisposableStore();
-	private readonly _providerRegistrations = new DisposableMap<number>();
+	private readonly _providerRegistrations = new DisposableMap<string>();
+	private readonly _lmProviderChange = new Emitter<{ vendor: string }>();
 	private readonly _pendingProgress = new Map<number, { defer: DeferredPromise<any>; stream: AsyncIterableSource<IChatResponseFragment | IChatResponseFragment[]> }>();
 	private readonly _ignoredFileProviderRegistrations = new DisposableMap<number>();
 
 	constructor(
 		extHostContext: IExtHostContext,
 		@ILanguageModelsService private readonly _chatProviderService: ILanguageModelsService,
-		@ILanguageModelStatsService private readonly _languageModelStatsService: ILanguageModelStatsService,
 		@ILogService private readonly _logService: ILogService,
 		@IAuthenticationService private readonly _authenticationService: IAuthenticationService,
 		@IAuthenticationAccessService private readonly _authenticationAccessService: IAuthenticationAccessService,
 		@IExtensionService private readonly _extensionService: IExtensionService,
 		@ILanguageModelIgnoredFilesService private readonly _ignoredFilesService: ILanguageModelIgnoredFilesService,
 	) {
 		this._proxy = extHostContext.getProxy(ExtHostContext.ExtHostChatProvider);
-		this._proxy.$acceptChatModelMetadata({ added: _chatProviderService.getLanguageModelIds().map(id => ({ identifier: id, metadata: _chatProviderService.lookupLanguageModel(id)! })) });
-		this._store.add(_chatProviderService.onDidChangeLanguageModels(this._proxy.$acceptChatModelMetadata, this._proxy));
 	}
 
 	dispose(): void {
+		this._lmProviderChange.dispose();
 		this._providerRegistrations.dispose();
 		this._ignoredFileProviderRegistrations.dispose();
 		this._store.dispose();
 	}
 
-	$registerLanguageModelProvider(handle: number, identifier: string, metadata: ILanguageModelChatMetadata): void {
+	$registerLanguageModelProvider(vendor: string): void {
 		const dipsosables = new DisposableStore();
-		dipsosables.add(this._chatProviderService.registerLanguageModelChat(identifier, {
-			metadata,
-			sendChatRequest: async (messages, from, options, token) => {
+		dipsosables.add(this._chatProviderService.registerLanguageModelProvider(vendor, {
+			onDidChange: Event.filter(this._lmProviderChange.event, e => e.vendor === vendor, dipsosables) as unknown as Event<void>,
+			prepareLanguageModelChat: async (options, token) => {
+				const modelsAndIdentifiers = await this._proxy.$prepareLanguageModelProvider(vendor, options, token);
+				modelsAndIdentifiers.forEach(m => {
+					if (m.metadata.auth) {
+						dipsosables.add(this._registerAuthenticationProvider(m.metadata.extension, m.metadata.auth));
+					}
+				});
+				return modelsAndIdentifiers;
+			},
+			sendChatRequest: async (modelId, messages, from, options, token) => {
 				const requestId = (Math.random() * 1e6) | 0;
 				const defer = new DeferredPromise<any>();
 				const stream = new AsyncIterableSource<IChatResponseFragment | IChatResponseFragment[]>();
@@ -74,7 +81,7 @@ export class MainThreadLanguageModels implements MainThreadLanguageModelsShape {
 						part.value.data = VSBuffer.wrap(await resizeImage(part.value.data.buffer));
 					})
 				);
-				await this._proxy.$startChatRequest(handle, requestId, from, new SerializableObjectWithBuffers(messages), options, token);
+				await this._proxy.$startChatRequest(modelId, requestId, from, new SerializableObjectWithBuffers(messages), options, token);
 			} catch (err) {
 				this._pendingProgress.delete(requestId);
 				throw err;
@@ -85,14 +92,15 @@ export class MainThreadLanguageModels implements MainThreadLanguageModelsShape {
 					stream: stream.asyncIterable
 				} satisfies ILanguageModelChatResponse;
 			},
-			provideTokenCount: (str, token) => {
-				return this._proxy.$provideTokenLength(handle, str, token);
+			provideTokenCount: (modelId, str, token) => {
+				return this._proxy.$provideTokenLength(modelId, str, token);
 			},
 		}));
-		if (metadata.auth) {
-			dipsosables.add(this._registerAuthenticationProvider(metadata.extension, metadata.auth));
-		}
-		this._providerRegistrations.set(handle, dipsosables);
+		this._providerRegistrations.set(vendor, dipsosables);
+	}
+
+	$onLMProviderChange(vendor: string): void {
+		this._lmProviderChange.fire({ vendor });
 	}
 
 	async $reportResponsePart(requestId: number, chunk: IChatResponseFragment | IChatResponseFragment[]): Promise<void> {
@@ -119,24 +127,20 @@ export class MainThreadLanguageModels implements MainThreadLanguageModelsShape {
 		}
 	}
 
-	$unregisterProvider(handle: number): void {
-		this._providerRegistrations.deleteAndDispose(handle);
+	$unregisterProvider(vendor: string): void {
+		this._providerRegistrations.deleteAndDispose(vendor);
 	}
 
 	$selectChatModels(selector: ILanguageModelChatSelector): Promise<string[]> {
 		return this._chatProviderService.selectLanguageModels(selector);
 	}
 
-	$whenLanguageModelChatRequestMade(identifier: string, extensionId: ExtensionIdentifier, participant?: string | undefined, tokenCount?: number | undefined): void {
-		this._languageModelStatsService.update(identifier, extensionId, participant, tokenCount);
-	}
-
-	async $tryStartChatRequest(extension: ExtensionIdentifier, providerId: string, requestId: number, messages: SerializableObjectWithBuffers<IChatMessage[]>, options: {}, token: CancellationToken): Promise<any> {
+	async $tryStartChatRequest(extension: ExtensionIdentifier, modelIdentifier: string, requestId: number, messages: SerializableObjectWithBuffers<IChatMessage[]>, options: {}, token: CancellationToken): Promise<any> {
 		this._logService.trace('[CHAT] request STARTED', extension.value, requestId);
 
 		let response: ILanguageModelChatResponse;
 		try {
-			response = await this._chatProviderService.sendChatRequest(providerId, extension, messages.value, options, token);
+			response = await this._chatProviderService.sendChatRequest(modelIdentifier, extension, messages.value, options, token);
 		} catch (err) {
 			this._logService.error('[CHAT] request FAILED', extension.value, requestId, err);
 			throw err;
@@ -170,8 +174,8 @@ export class MainThreadLanguageModels implements MainThreadLanguageModelsShape {
 	}
 
 
-	$countTokens(provider: string, value: string | IChatMessage, token: CancellationToken): Promise<number> {
-		return this._chatProviderService.computeTokenLength(provider, value, token);
+	$countTokens(modelId: string, value: string | IChatMessage, token: CancellationToken): Promise<number> {
+		return this._chatProviderService.computeTokenLength(modelId, value, token);
 	}
 
 	private _registerAuthenticationProvider(extension: ExtensionIdentifier, auth: { providerLabel: string; accountLabel?: string | undefined }): IDisposable {
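The `onDidChange`/`$onLMProviderChange` pairing above shares one emitter across all vendors and filters per registration. Below is a rough analogue rebuilt on the public `vscode.EventEmitter` API rather than the internal `Emitter`/`Event.filter` utilities; the class and method names here are hypothetical.

import * as vscode from 'vscode';

// Rough analogue of the per-vendor change-event pattern in the diff above.
// Names below (VendorChangeHub, forVendor) are hypothetical; the real code lives in
// MainThreadLanguageModels and uses the internal Emitter and Event.filter helpers.
class VendorChangeHub {
	private readonly _onDidChange = new vscode.EventEmitter<{ vendor: string }>();

	// One shared emitter; each provider registration only observes events for its own
	// vendor, mirroring Event.filter(this._lmProviderChange.event, e => e.vendor === vendor, ...).
	forVendor(vendor: string, listener: () => void): vscode.Disposable {
		return this._onDidChange.event(e => {
			if (e.vendor === vendor) {
				listener();
			}
		});
	}

	// Mirrors $onLMProviderChange(vendor): the extension host signals that one vendor's
	// model list changed, and only that vendor's registration is re-queried.
	fire(vendor: string): void {
		this._onDidChange.fire({ vendor });
	}

	dispose(): void {
		this._onDidChange.dispose();
	}
}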

src/vs/workbench/api/common/extHost.api.impl.ts

Lines changed: 2 additions & 2 deletions
@@ -1519,9 +1519,9 @@ export function createApiFactoryAndRegisterActors(accessor: ServicesAccessor): I
 			onDidChangeChatModels: (listener, thisArgs?, disposables?) => {
 				return extHostLanguageModels.onDidChangeProviders(listener, thisArgs, disposables);
 			},
-			registerChatModelProvider: (id, provider, metadata) => {
+			registerChatModelProvider: (vendor, provider) => {
 				checkProposedApiEnabled(extension, 'chatProvider');
-				return extHostLanguageModels.registerLanguageModel(extension, id, provider, metadata);
+				return extHostLanguageModels.registerLanguageModelProvider(extension, vendor, provider);
 			},
 			// --- embeddings
 			get embeddingModels() {

src/vs/workbench/api/common/extHost.protocol.ts

Lines changed: 9 additions & 9 deletions
@@ -61,7 +61,7 @@ import { IChatContentInlineReference, IChatFollowup, IChatNotebookEdit, IChatPro
 import { IChatSessionContent } from '../../contrib/chat/common/chatSessionsService.js';
 import { IChatRequestVariableValue } from '../../contrib/chat/common/chatVariables.js';
 import { ChatAgentLocation } from '../../contrib/chat/common/constants.js';
-import { IChatMessage, IChatResponseFragment, ILanguageModelChatMetadata, ILanguageModelChatSelector, ILanguageModelsChangeEvent } from '../../contrib/chat/common/languageModels.js';
+import { IChatMessage, IChatResponseFragment, ILanguageModelChatMetadataAndIdentifier, ILanguageModelChatSelector } from '../../contrib/chat/common/languageModels.js';
 import { IPreparedToolInvocation, IToolInvocation, IToolInvocationPreparationContext, IToolProgressStep, IToolResult, ToolDataSource } from '../../contrib/chat/common/languageModelToolsService.js';
 import { DebugConfigurationProviderTriggerKind, IAdapterDescriptor, IConfig, IDebugSessionReplMode, IDebugTestRunReference, IDebugVisualization, IDebugVisualizationContext, IDebugVisualizationTreeItem, MainThreadDebugVisualization } from '../../contrib/debug/common/debug.js';
 import { McpCollectionDefinition, McpConnectionState, McpServerDefinition, McpServerLaunch } from '../../contrib/mcp/common/mcpTypes.js';
@@ -1258,26 +1258,26 @@
 }
 
 export interface MainThreadLanguageModelsShape extends IDisposable {
-	$registerLanguageModelProvider(handle: number, identifier: string, metadata: ILanguageModelChatMetadata): void;
-	$unregisterProvider(handle: number): void;
-	$tryStartChatRequest(extension: ExtensionIdentifier, provider: string, requestId: number, messages: SerializableObjectWithBuffers<IChatMessage[]>, options: {}, token: CancellationToken): Promise<void>;
+	$registerLanguageModelProvider(vendor: string): void;
+	$onLMProviderChange(vendor: string): void;
+	$unregisterProvider(vendor: string): void;
+	$tryStartChatRequest(extension: ExtensionIdentifier, modelIdentifier: string, requestId: number, messages: SerializableObjectWithBuffers<IChatMessage[]>, options: {}, token: CancellationToken): Promise<void>;
 	$reportResponsePart(requestId: number, chunk: IChatResponseFragment | IChatResponseFragment[]): Promise<void>;
 	$reportResponseDone(requestId: number, error: SerializedError | undefined): Promise<void>;
 	$selectChatModels(selector: ILanguageModelChatSelector): Promise<string[]>;
-	$whenLanguageModelChatRequestMade(identifier: string, extension: ExtensionIdentifier, participant?: string, tokenCount?: number): void;
-	$countTokens(provider: string, value: string | IChatMessage, token: CancellationToken): Promise<number>;
+	$countTokens(modelId: string, value: string | IChatMessage, token: CancellationToken): Promise<number>;
 	$fileIsIgnored(uri: UriComponents, token: CancellationToken): Promise<boolean>;
 	$registerFileIgnoreProvider(handle: number): void;
 	$unregisterFileIgnoreProvider(handle: number): void;
 }
 
 export interface ExtHostLanguageModelsShape {
-	$acceptChatModelMetadata(data: ILanguageModelsChangeEvent): void;
+	$prepareLanguageModelProvider(vendor: string, options: { silent: boolean }, token: CancellationToken): Promise<ILanguageModelChatMetadataAndIdentifier[]>;
 	$updateModelAccesslist(data: { from: ExtensionIdentifier; to: ExtensionIdentifier; enabled: boolean }[]): void;
-	$startChatRequest(handle: number, requestId: number, from: ExtensionIdentifier, messages: SerializableObjectWithBuffers<IChatMessage[]>, options: { [name: string]: any }, token: CancellationToken): Promise<void>;
+	$startChatRequest(modelId: string, requestId: number, from: ExtensionIdentifier, messages: SerializableObjectWithBuffers<IChatMessage[]>, options: { [name: string]: any }, token: CancellationToken): Promise<void>;
 	$acceptResponsePart(requestId: number, chunk: IChatResponseFragment | IChatResponseFragment[]): Promise<void>;
 	$acceptResponseDone(requestId: number, error: SerializedError | undefined): Promise<void>;
-	$provideTokenLength(handle: number, value: string | IChatMessage, token: CancellationToken): Promise<number>;
+	$provideTokenLength(modelId: string, value: string | IChatMessage, token: CancellationToken): Promise<number>;
 	$isFileIgnored(handle: number, uri: UriComponents, token: CancellationToken): Promise<boolean>;
 }
 
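Taken together, the protocol change replaces the eager `$acceptChatModelMetadata` push with a lazy, vendor-scoped pull, and all per-request calls are now addressed by model id instead of a numeric handle. Below is a schematic sketch of that flow; `ExtHostProxy` is a trimmed, hypothetical stand-in for `ExtHostLanguageModelsShape`, and the `identifier` field is a simplified view of `ILanguageModelChatMetadataAndIdentifier`.

import type { CancellationToken } from 'vscode';

// Schematic sketch only: the main thread pulls a vendor's models on demand, then addresses
// follow-up calls (token counting, chat requests) by the returned model id.
interface ExtHostProxy {
	$prepareLanguageModelProvider(vendor: string, options: { silent: boolean }, token: CancellationToken): Promise<{ identifier: string }[]>;
	$provideTokenLength(modelId: string, value: string, token: CancellationToken): Promise<number>;
}

// Resolve a vendor's models lazily, then count tokens against the first one by id.
async function countTokensForVendor(proxy: ExtHostProxy, vendor: string, text: string, token: CancellationToken): Promise<number> {
	const models = await proxy.$prepareLanguageModelProvider(vendor, { silent: true }, token);
	if (models.length === 0) {
		return 0;
	}
	return proxy.$provideTokenLength(models[0].identifier, text, token);
}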