Skip to content

Commit e0eb73b

Browse files
committed
Closes #4101 adds Claude 3.7 support
Curates the AI models list
1 parent b99a9da commit e0eb73b

File tree

4 files changed: +67 additions, −4 deletions

CHANGELOG.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,11 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p
 ### Added
 
 - Adds AI model status and model switcher to the _Home_ view ([#4064](https://github.com/gitkraken/vscode-gitlens/issues/4064))
+- Adds Anthropic Claude 3.7 Sonnet model for GitLens' AI features ([#4101](https://github.com/gitkraken/vscode-gitlens/issues/4101))
+
+### Changed
+
+- Curated the list of AI models available for GitLens' AI features
 
 ### Fixed
 

src/ai/anthropicProvider.ts

Lines changed: 37 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,12 +7,32 @@ const provider = { id: 'anthropic', name: 'Anthropic' } as const;
 
 type AnthropicModel = AIModel<typeof provider.id>;
 const models: AnthropicModel[] = [
+	{
+		id: 'claude-3-7-sonnet-latest',
+		name: 'Claude 3.7 Sonnet',
+		maxTokens: { input: 204800, output: 8192 },
+		provider: provider,
+	},
+	{
+		id: 'claude-3-7-sonnet-20250219',
+		name: 'Claude 3.7 Sonnet',
+		maxTokens: { input: 204800, output: 8192 },
+		provider: provider,
+		hidden: true,
+	},
 	{
 		id: 'claude-3-5-sonnet-latest',
 		name: 'Claude 3.5 Sonnet',
 		maxTokens: { input: 204800, output: 8192 },
 		provider: provider,
 	},
+	{
+		id: 'claude-3-5-sonnet-20241022',
+		name: 'Claude 3.5 Sonnet',
+		maxTokens: { input: 204800, output: 8192 },
+		provider: provider,
+		hidden: true,
+	},
 	{
 		id: 'claude-3-5-sonnet-20240620',
 		name: 'Claude 3.5 Sonnet',
@@ -25,6 +45,7 @@ const models: AnthropicModel[] = [
 		name: 'Claude 3.5 Haiku',
 		maxTokens: { input: 204800, output: 8192 },
 		provider: provider,
+		default: true,
 	},
 	{
 		id: 'claude-3-5-haiku-20241022',
@@ -46,24 +67,39 @@ const models: AnthropicModel[] = [
 		provider: provider,
 		hidden: true,
 	},
+	{
+		id: 'claude-3-sonnet-latest',
+		name: 'Claude 3 Sonnet',
+		maxTokens: { input: 204800, output: 4096 },
+		provider: provider,
+		hidden: true,
+	},
 	{
 		id: 'claude-3-sonnet-20240229',
 		name: 'Claude 3 Sonnet',
 		maxTokens: { input: 204800, output: 4096 },
 		provider: provider,
+		hidden: true,
+	},
+	{
+		id: 'claude-3-haiku-latest',
+		name: 'Claude 3 Haiku',
+		maxTokens: { input: 204800, output: 4096 },
+		provider: provider,
 	},
 	{
 		id: 'claude-3-haiku-20240307',
 		name: 'Claude 3 Haiku',
 		maxTokens: { input: 204800, output: 4096 },
 		provider: provider,
-		default: true,
+		hidden: true,
 	},
 	{
 		id: 'claude-2.1',
 		name: 'Claude 2.1',
 		maxTokens: { input: 204800, output: 4096 },
 		provider: provider,
+		hidden: true,
 	},
 ];
 

src/ai/geminiProvider.ts

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ const models: GeminiModel[] = [
 		name: 'Gemini 2.0 Flash',
 		maxTokens: { input: 1048576, output: 8192 },
 		provider: provider,
+		default: true,
 	},
 	{
 		id: 'gemini-2.0-flash-001',
@@ -49,22 +50,23 @@ const models: GeminiModel[] = [
 		name: 'Gemini Experimental 1206',
 		maxTokens: { input: 2097152, output: 8192 },
 		provider: provider,
+		hidden: true,
 	},
 	{
 		id: 'gemini-exp-1121',
 		name: 'Gemini Experimental 1121',
 		maxTokens: { input: 2097152, output: 8192 },
 		provider: provider,
+		hidden: true,
 	},
 	{
-		id: 'gemini-1.5-pro-latest',
+		id: 'gemini-1.5-pro',
 		name: 'Gemini 1.5 Pro',
 		maxTokens: { input: 2097152, output: 8192 },
 		provider: provider,
-		default: true,
 	},
 	{
-		id: 'gemini-1.5-flash-latest',
+		id: 'gemini-1.5-flash',
 		name: 'Gemini 1.5 Flash',
 		maxTokens: { input: 1048576, output: 8192 },
 		provider: provider,

src/ai/openaiProvider.ts

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,14 @@ const models: OpenAIModel[] = [
 		provider: provider,
 		temperature: null,
 	},
+	{
+		id: 'o3-mini-2025-01-31',
+		name: 'o3 mini',
+		maxTokens: { input: 200000, output: 100000 },
+		provider: provider,
+		temperature: null,
+		hidden: true,
+	},
 	{
 		id: 'o1',
 		name: 'o1',
@@ -34,6 +42,7 @@ const models: OpenAIModel[] = [
 		maxTokens: { input: 128000, output: 32768 },
 		provider: provider,
 		temperature: null,
+		hidden: true,
 	},
 	{
 		id: 'o1-preview-2024-09-12',
@@ -65,6 +74,13 @@ const models: OpenAIModel[] = [
 		provider: provider,
 		default: true,
 	},
+	{
+		id: 'gpt-4o-2024-11-20',
+		name: 'GPT-4o',
+		maxTokens: { input: 128000, output: 16384 },
+		provider: provider,
+		hidden: true,
+	},
 	{
 		id: 'gpt-4o-2024-08-06',
 		name: 'GPT-4o',
@@ -104,6 +120,7 @@ const models: OpenAIModel[] = [
 		name: 'GPT-4 Turbo',
 		maxTokens: { input: 128000, output: 4096 },
 		provider: provider,
+		hidden: true,
 	},
 	{
 		id: 'gpt-4-turbo-2024-04-09',
@@ -117,6 +134,7 @@ const models: OpenAIModel[] = [
 		name: 'GPT-4 Turbo preview',
 		maxTokens: { input: 128000, output: 4096 },
 		provider: provider,
+		hidden: true,
 	},
 	{
 		id: 'gpt-4-0125-preview',
@@ -137,6 +155,7 @@ const models: OpenAIModel[] = [
 		name: 'GPT-4',
 		maxTokens: { input: 8192, output: 4096 },
 		provider: provider,
+		hidden: true,
 	},
 	{
 		id: 'gpt-4-0613',
@@ -164,6 +183,7 @@ const models: OpenAIModel[] = [
 		name: 'GPT-3.5 Turbo',
 		maxTokens: { input: 16385, output: 4096 },
 		provider: provider,
+		hidden: true,
 	},
 	{
 		id: 'gpt-3.5-turbo-0125',

0 commit comments

Comments
 (0)