
Commit 528855b

Adds new AI models
1 parent: a608328

8 files changed: +244 additions, -37 deletions

docs/telemetry-events.md

Lines changed: 6 additions & 6 deletions
@@ -120,7 +120,7 @@
     'failed.reason': 'user-declined' | 'user-cancelled' | 'error',
     'input.length': number,
     'model.id': string,
-    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'huggingface' | 'openai' | 'vscode' | 'xai',
+    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'gitkraken' | 'huggingface' | 'openai' | 'vscode' | 'xai',
     'model.provider.name': string,
     'output.length': number,
     'retry.count': number,
@@ -139,7 +139,7 @@
     'failed.reason': 'user-declined' | 'user-cancelled' | 'error',
     'input.length': number,
     'model.id': string,
-    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'huggingface' | 'openai' | 'vscode' | 'xai',
+    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'gitkraken' | 'huggingface' | 'openai' | 'vscode' | 'xai',
     'model.provider.name': string,
     'output.length': number,
     'retry.count': number,
@@ -157,7 +157,7 @@ or
     'failed.reason': 'user-declined' | 'user-cancelled' | 'error',
     'input.length': number,
     'model.id': string,
-    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'huggingface' | 'openai' | 'vscode' | 'xai',
+    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'gitkraken' | 'huggingface' | 'openai' | 'vscode' | 'xai',
     'model.provider.name': string,
     'output.length': number,
     'retry.count': number,
@@ -174,7 +174,7 @@ or
     'failed.reason': 'user-declined' | 'user-cancelled' | 'error',
     'input.length': number,
     'model.id': string,
-    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'huggingface' | 'openai' | 'vscode' | 'xai',
+    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'gitkraken' | 'huggingface' | 'openai' | 'vscode' | 'xai',
     'model.provider.name': string,
     'output.length': number,
     'retry.count': number,
@@ -191,7 +191,7 @@ or
     'failed.reason': 'user-declined' | 'user-cancelled' | 'error',
     'input.length': number,
     'model.id': string,
-    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'huggingface' | 'openai' | 'vscode' | 'xai',
+    'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'gitkraken' | 'huggingface' | 'openai' | 'vscode' | 'xai',
     'model.provider.name': string,
     'output.length': number,
     'retry.count': number,
@@ -206,7 +206,7 @@ or
 ```typescript
 {
   'model.id': string,
-  'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'huggingface' | 'openai' | 'vscode' | 'xai',
+  'model.provider.id': 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'gitkraken' | 'huggingface' | 'openai' | 'vscode' | 'xai',
   'model.provider.name': string
 }
 ```

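For orientation, a hypothetical payload with the new provider id might look like the sketch below; the values are illustrative only and not taken from the commit, while the provider name mirrors the `GitKraken AI (Preview)` label introduced in `gitkrakenProvider.ts` further down.

```typescript
// Illustrative only — values are made up; the shape follows the documented telemetry properties.
const event = {
	'model.id': 'claude-3-7-sonnet',
	'model.provider.id': 'gitkraken' as const,
	'model.provider.name': 'GitKraken AI (Preview)',
	'input.length': 2048,
	'output.length': 192,
	'retry.count': 0,
};
```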
package.json

Lines changed: 17 additions & 3 deletions
@@ -4023,22 +4023,36 @@
       "null"
     ],
     "default": null,
-    "pattern": "^((anthropic|deepseek|github|google|huggingface|openai|xai):([\\w.-]+)|vscode)$",
-    "markdownDescription": "Specifies the AI model to use for GitLens' AI features. Should be formatted as `provider:model` (e.g. `openai:gpt-4o` or `anthropic:claude-3-5-sonnet-latest`), or `vscode` for models provided by the VS Code extension API (e.g. Copilot)",
+    "pattern": "^((anthropic|deepseek|github|google|huggingface|openai|xai):([\\w.-]+)|gitkraken|vscode)$",
+    "markdownDescription": "Specifies the AI provider and model to use for GitLens' AI features. Should be formatted as `provider:model` (e.g. `openai:gpt-4o` or `anthropic:claude-3-5-sonnet-latest`), `gitkraken` for GitKraken AI provided models, or `vscode` for models provided by the VS Code extension API (e.g. Copilot)",
     "scope": "window",
     "order": 10,
     "tags": [
       "preview"
     ]
   },
+  "gitlens.ai.gitkraken.model": {
+    "type": [
+      "string",
+      "null"
+    ],
+    "default": null,
+    "pattern": "^(.*):(.*)$",
+    "markdownDescription": "Specifies the GitKraken AI provided model to use for GitLens' AI features, formatted as `provider:model`",
+    "scope": "window",
+    "order": 5,
+    "tags": [
+      "preview"
+    ]
+  },
   "gitlens.ai.vscode.model": {
     "type": [
       "string",
       "null"
     ],
     "default": null,
     "pattern": "^(.*):(.*)$",
-    "markdownDescription": "Specifies the VS Code provided model to use for GitLens' AI features, formatted as `vendor:family`",
+    "markdownDescription": "Specifies the VS Code provided model to use for GitLens' AI features, formatted as `provider:model`",
     "scope": "window",
     "order": 20,
     "tags": [

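As a quick sanity check of the new pattern (a sketch, not part of the commit), the updated `gitlens.ai.model` value accepts a bare `gitkraken` or `vscode`, or an explicit `provider:model` pair for the remaining providers:

```typescript
// The regex mirrors the updated `gitlens.ai.model` pattern from package.json.
const pattern = /^((anthropic|deepseek|github|google|huggingface|openai|xai):([\w.-]+)|gitkraken|vscode)$/;

pattern.test('openai:gpt-4o'); // true — explicit provider:model
pattern.test('gitkraken'); // true — the model is resolved from `gitlens.ai.gitkraken.model`
pattern.test('vscode'); // true — the model is resolved from `gitlens.ai.vscode.model`
pattern.test('gitkraken:gpt-4o'); // false — primary providers are configured without a model here
```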
src/config.ts

Lines changed: 5 additions & 2 deletions
@@ -1,4 +1,4 @@
-import type { SupportedAIModels, VSCodeAIModels } from './constants.ai';
+import type { AIProviderAndModel, SupportedAIModels } from './constants.ai';
 import type { GroupableTreeViewTypes } from './constants.views';
 import type { DateTimeFormat } from './system/date';
 import type { LogLevel } from './system/logger.constants';
@@ -227,6 +227,9 @@ interface AIConfig {
 	readonly generateCodeSuggestMessage: {
 		readonly customInstructions: string;
 	};
+	readonly gitkraken: {
+		readonly model: AIProviderAndModel | null;
+	};
 	readonly model: SupportedAIModels | null;
 	readonly modelOptions: {
 		readonly temperature: number;
@@ -235,7 +238,7 @@ interface AIConfig {
 		readonly url: string | null;
 	};
 	readonly vscode: {
-		readonly model: VSCodeAIModels | null;
+		readonly model: AIProviderAndModel | null;
 	};
 }

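A minimal sketch of how the new nullable key surfaces through this interface, assuming GitLens' existing `configuration.get` helper (used in `aiProviderService.ts` below); the example model value is hypothetical:

```typescript
// Sketch only; the setting is `AIProviderAndModel | null`, so a null check is required.
const gkModel = configuration.get('ai.gitkraken.model'); // e.g. 'anthropic:claude-3-7-sonnet' or null
if (gkModel != null) {
	const [providerId, modelId] = gkModel.split(':');
	console.log(`GitKraken AI will use ${modelId} from ${providerId}`);
}
```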
src/constants.ai.ts

Lines changed: 14 additions & 3 deletions
@@ -1,4 +1,15 @@
-export type VSCodeAIModels = `${string}:${string}`;
+export type AIProviders =
+	| 'anthropic'
+	| 'deepseek'
+	| 'gemini'
+	| 'github'
+	| 'gitkraken'
+	| 'huggingface'
+	| 'openai'
+	| 'vscode'
+	| 'xai';
+export type AIPrimaryProviders = Extract<AIProviders, 'gitkraken' | 'vscode'>;
+export const primaryAIProviders = ['gitkraken', 'vscode'] as const satisfies readonly AIPrimaryProviders[];
 
-export type AIProviders = 'anthropic' | 'deepseek' | 'gemini' | 'github' | 'huggingface' | 'openai' | 'vscode' | 'xai';
-export type SupportedAIModels = `${Exclude<AIProviders, 'vscode'>}:${string}` | 'vscode';
+export type AIProviderAndModel = `${string}:${string}`;
+export type SupportedAIModels = `${Exclude<AIProviders, AIPrimaryProviders>}:${string}` | AIPrimaryProviders;

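A minimal sketch (not part of the commit) of what the reworked `SupportedAIModels` template-literal type now accepts and rejects:

```typescript
import type { SupportedAIModels } from './constants.ai';

const explicit: SupportedAIModels = 'openai:gpt-4o'; // ok — `provider:model` for non-primary providers
const gitkraken: SupportedAIModels = 'gitkraken'; // ok — primary providers stand alone
const vscode: SupportedAIModels = 'vscode'; // ok

// @ts-expect-error — primary providers are excluded from the `provider:model` form;
// their model lives in `gitlens.ai.gitkraken.model` / `gitlens.ai.vscode.model` instead
const invalid: SupportedAIModels = 'gitkraken:gpt-4o';
```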
src/plus/ai/aiProviderService.ts

Lines changed: 45 additions & 22 deletions
@@ -1,6 +1,7 @@
 import type { CancellationToken, Disposable, Event, MessageItem, ProgressOptions } from 'vscode';
 import { env, EventEmitter, window } from 'vscode';
-import type { AIProviders, SupportedAIModels, VSCodeAIModels } from '../../constants.ai';
+import type { AIPrimaryProviders, AIProviderAndModel, AIProviders, SupportedAIModels } from '../../constants.ai';
+import { primaryAIProviders } from '../../constants.ai';
 import type { AIGenerateDraftEventData, Source, TelemetryEvents } from '../../constants.telemetry';
 import type { Container } from '../../container';
 import { CancellationError } from '../../errors';
@@ -41,6 +42,17 @@ interface AIProviderConstructor<Provider extends AIProviders = AIProviders> {
 
 // Order matters for sorting the picker
 const _supportedProviderTypes = new Map<AIProviders, Lazy<Promise<AIProviderConstructor>>>([
+	...(configuration.getAny('gitkraken.ai.enabled', undefined, false)
+		? [
+				[
+					'gitkraken',
+					lazy(
+						async () =>
+							(await import(/* webpackChunkName: "ai" */ './gitkrakenProvider')).GitKrakenProvider,
+					),
+				],
+			]
+		: []),
 	...(supportedInVSCodeVersion('language-models')
 		? [
 				[
@@ -105,10 +117,10 @@ export class AIProviderService implements Disposable {
 		if (providerId != null && this.supports(providerId)) {
 			if (modelId != null) {
 				return { provider: providerId, model: modelId };
-			} else if (providerId === 'vscode') {
-				modelId = configuration.get('ai.vscode.model') as VSCodeAIModels;
+			} else if (isPrimaryAIProvider(providerId)) {
+				modelId = configuration.get(`ai.${providerId}.model`) ?? undefined;
 				if (modelId != null) {
-					// Model ids are in the form of `vendor:family`
+					// Model ids are in the form of `provider:model`
 					if (/^(.+):(.+)$/.test(modelId)) {
 						return { provider: providerId, model: modelId };
 					}
@@ -119,19 +131,26 @@
 		return undefined;
 	}
 
-	async getModels(): Promise<readonly AIModel[]> {
-		const modelResults = await Promise.allSettled(
-			map(_supportedProviderTypes.values(), t =>
-				t.value.then(async t => {
-					const p = new t(this.container, this.connection);
-					try {
-						return await p.getModels();
-					} finally {
-						p.dispose();
-					}
-				}),
-			),
-		);
+	async getModels(providerId?: AIProviders): Promise<readonly AIModel[]> {
+		const loadModels = async (type: Lazy<Promise<AIProviderConstructor>>) => {
+			return type.value.then(async t => {
+				const p = new t(this.container, this.connection);
+				try {
+					return await p.getModels();
+				} finally {
+					p.dispose();
+				}
+			});
+		};
+
+		if (providerId != null && this.supports(providerId)) {
+			const type = _supportedProviderTypes.get(providerId);
+			if (type == null) return [];
+
+			return loadModels(type);
+		}
+
+		const modelResults = await Promise.allSettled(map(_supportedProviderTypes.values(), t => loadModels(t)));
 
 		return modelResults.flatMap(m => getSettledValue(m, []));
 	}
@@ -230,9 +249,9 @@
 		this._model = model;
 
 		if (changed) {
-			if (isVSCodeAIModel(model)) {
-				await configuration.updateEffective(`ai.model`, 'vscode');
-				await configuration.updateEffective(`ai.vscode.model`, model.id);
+			if (isPrimaryAIProviderModel(model)) {
+				await configuration.updateEffective(`ai.model`, model.provider.id);
+				await configuration.updateEffective(`ai.${model.provider.id}.model`, model.id);
 			} else {
 				await configuration.updateEffective(
 					`ai.model`,
@@ -709,6 +728,10 @@ function splitMessageIntoSummaryAndBody(message: string): AIResult {
 	};
 }
 
-function isVSCodeAIModel(model: AIModel): model is AIModel<'vscode', VSCodeAIModels> {
-	return model.provider.id === 'vscode';
+function isPrimaryAIProvider(provider: AIProviders): provider is AIPrimaryProviders {
+	return primaryAIProviders.includes(provider as AIPrimaryProviders);
+}
+
+function isPrimaryAIProviderModel(model: AIModel): model is AIModel<AIPrimaryProviders, AIProviderAndModel> {
+	return isPrimaryAIProvider(model.provider.id);
 }

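A usage sketch of the new optional `providerId` parameter on `getModels`; treating `container.ai` as the accessor for this service is an assumption made for illustration:

```typescript
import type { Container } from '../../container';

// Sketch: `container.ai` is assumed to expose AIProviderService.
async function pickGitKrakenModel(container: Container) {
	// Only the GitKraken provider is instantiated and queried for this call
	const gitkrakenModels = await container.ai.getModels('gitkraken');

	// Omitting the provider id keeps the previous behavior: all registered providers are queried in parallel
	const allModels = await container.ai.getModels();

	return gitkrakenModels.find(m => m.default) ?? allModels[0];
}
```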
src/plus/ai/gitkrakenProvider.ts

Lines changed: 143 additions & 0 deletions
@@ -0,0 +1,143 @@
+import type { CancellationToken } from 'vscode';
+import type { Response } from '@env/fetch';
+import { fetch } from '@env/fetch';
+import { debug } from '../../system/decorators/log';
+import { Logger } from '../../system/logger';
+import { getLogScope } from '../../system/logger.scope';
+import type { AIActionType, AIModel } from './models/model';
+import type { PromptTemplate } from './models/promptTemplates';
+import { OpenAICompatibleProvider } from './openAICompatibleProvider';
+import { getActionName } from './utils/-webview/ai.utils';
+
+const provider = { id: 'gitkraken', name: 'GitKraken AI (Preview)' } as const;
+
+type GitKrakenModel = AIModel<typeof provider.id>;
+
+export class GitKrakenProvider extends OpenAICompatibleProvider<typeof provider.id> {
+	readonly id = provider.id;
+	readonly name = provider.name;
+	protected readonly config = {};
+
+	@debug()
+	async getModels(): Promise<readonly AIModel<typeof provider.id>[]> {
+		const scope = getLogScope();
+
+		try {
+			const rsp = await fetch(this.container.urls.getGkAIApiUrl('providers/message-prompt'), {
+				headers: await this.connection.getGkHeaders(undefined, undefined, {
+					Accept: 'application/json',
+				}),
+			});
+
+			interface ModelsResponse {
+				data: {
+					providerId: string;
+					providerName: string;
+					modelId: string;
+					modelName: string;
+					preferred: boolean;
+					maxInputTokens: number;
+					maxOutputTokens: number;
+				}[];
+				error?: null;
+			}
+
+			const result: ModelsResponse = await rsp.json();
+
+			if (result.error == null) {
+				const models: GitKrakenModel[] = result.data.map(
+					m =>
+						({
+							id: m.modelId,
+							name: m.modelName,
+							maxTokens: { input: m.maxInputTokens, output: m.maxOutputTokens },
+							provider: provider,
+							default: m.preferred,
+							temperature: null,
+						}) satisfies GitKrakenModel,
+				);
+				return models;
+			}
+
+			debugger;
+			Logger.error(undefined, scope, `${String(result.error)}: Unable to get models`);
+		} catch (ex) {
+			debugger;
+			Logger.error(ex, scope, `Unable to get models`);
+		}
+
+		return [];
+	}
+
+	override async getPromptTemplate<TAction extends AIActionType>(
+		action: TAction,
+		model: AIModel<typeof provider.id>,
+	): Promise<PromptTemplate | undefined> {
+		const scope = getLogScope();
+
+		try {
+			const rsp = await fetch(this.container.urls.getGkAIApiUrl(`templates/message-prompt/${action}`), {
+				headers: await this.connection.getGkHeaders(undefined, undefined, {
+					Accept: 'application/json',
+				}),
+			});
+
+			interface PromptResponse {
+				data: {
+					id: string;
+					template: string;
+					variables: string[];
+				};
+				error?: null;
+			}
+
+			const result: PromptResponse = await rsp.json();
+			if (result.error == null) {
+				return {
+					id: result.data.id,
+					name: getActionName(action),
+					template: result.data.template,
+					variables: result.data.variables,
+				};
+			}
+
+			debugger;
+			Logger.error(undefined, scope, `${String(result.error)}: Unable to get prompt template for '${action}'`);
+		} catch (ex) {
+			debugger;
+			Logger.error(ex, scope, `Unable to get prompt template for '${action}'`);
+		}
+
+		return super.getPromptTemplate(action, model);
+	}
+
+	protected override getApiKey(): Promise<string | undefined> {
+		return Promise.resolve('');
+	}
+
+	protected getUrl(_model: AIModel<typeof provider.id>): string {
+		return this.container.urls.getGkAIApiUrl('chat/completions');
+	}
+
+	protected override getHeaders<TAction extends AIActionType>(
+		action: TAction,
+		_model: AIModel<typeof provider.id>,
+		_url: string,
+		_apiKey: string,
+	): Promise<Record<string, string>> {
+		return this.connection.getGkHeaders(undefined, undefined, {
+			Accept: 'application/json',
+			'GK-Action': action,
+		});
+	}
+
+	protected override fetchCore<TAction extends AIActionType>(
+		action: TAction,
+		model: AIModel<typeof provider.id>,
+		_apiKey: string,
+		request: object,
+		cancellation: CancellationToken | undefined,
+	): Promise<Response> {
+		return super.fetchCore(action, model, _apiKey, request, cancellation);
+	}
+}

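To make the mapping in `getModels()` concrete, here is a hypothetical response from the `providers/message-prompt` endpoint and the model entries it would produce; the concrete provider/model values are made up for illustration, only the shape follows the `ModelsResponse` interface above.

```typescript
// Hypothetical response body; the shape mirrors the ModelsResponse interface above.
const response = {
	data: [
		{
			providerId: 'anthropic',
			providerName: 'Anthropic',
			modelId: 'claude-3-7-sonnet',
			modelName: 'Claude 3.7 Sonnet',
			preferred: true,
			maxInputTokens: 200000,
			maxOutputTokens: 8192,
		},
	],
	error: null,
};

// Mapped the same way getModels() does it: every entry is attributed to the GitKraken
// provider, and `preferred` becomes the default selection in the model picker.
const models = response.data.map(m => ({
	id: m.modelId,
	name: m.modelName,
	maxTokens: { input: m.maxInputTokens, output: m.maxOutputTokens },
	provider: { id: 'gitkraken', name: 'GitKraken AI (Preview)' } as const,
	default: m.preferred,
	temperature: null,
}));
```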