Skip to content

Commit d70b715

Browse files
committed
fix: update gemini model to 2.5-flash and increase MaxTokens
gemini-2.0-flash is no longer available to new API keys. Update all integration tests to use gemini-2.5-flash instead, and increase MaxTokens from 32 to 256, since thinking models consume part of the output-token budget for internal reasoning before emitting visible text.
1 parent a5e3eb5 commit d70b715

File tree

4 files changed

+14
-14
lines changed

4 files changed

+14
-14
lines changed

agent/integration_test.go

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ func availableProviders(t *testing.T) []providerTestCase {
6262
providers = append(providers, providerTestCase{
6363
name: "gemini",
6464
provider: gemini.New(key),
65-
model: "gemini-2.0-flash",
65+
model: "gemini-2.5-flash",
6666
})
6767
}
6868
if project := os.Getenv("GOOGLE_CLOUD_PROJECT"); project != "" {
@@ -74,7 +74,7 @@ func availableProviders(t *testing.T) []providerTestCase {
7474
providers = append(providers, providerTestCase{
7575
name: "gemini-vertex",
7676
provider: gemini.New("", gemini.WithVertexAI(project, region), gemini.WithTokenSource(provider.GcloudTokenSource(account))),
77-
model: "gemini-2.0-flash",
77+
model: "gemini-2.5-flash",
7878
})
7979
}
8080

@@ -213,7 +213,7 @@ func TestIntegration_Streaming(t *testing.T) {
213213
ch, err := tc.provider.StreamChat(ctx, provider.ChatRequest{
214214
Model: tc.model,
215215
Messages: []provider.Message{provider.NewTextMessage(provider.RoleUser, "Say 'test-stream-ok' and nothing else.")},
216-
MaxTokens: 32,
216+
MaxTokens: 256,
217217
})
218218
if err != nil {
219219
t.Fatalf("StreamChat: %v", err)

internal/cli/setup_models.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ var llmProviders = []providerInfo{
4949
models: []huh.Option[string]{
5050
huh.NewOption("gemini-2.5-pro (most capable)", "gemini-2.5-pro"),
5151
huh.NewOption("gemini-2.5-flash (fast, good balance)", "gemini-2.5-flash"),
52-
huh.NewOption("gemini-2.0-flash (fastest, cheapest)", "gemini-2.0-flash"),
52+
huh.NewOption("gemini-2.0-flash-lite (fastest, cheapest)", "gemini-2.0-flash-lite"),
5353
},
5454
},
5555
}

memory/integration_test.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ func availableProviders(t *testing.T) []providerTestCase {
3939
providers = append(providers, providerTestCase{
4040
name: "gemini",
4141
provider: gemini.New(key),
42-
model: "gemini-2.0-flash",
42+
model: "gemini-2.5-flash",
4343
})
4444
}
4545
if project := os.Getenv("GOOGLE_CLOUD_PROJECT"); project != "" {
@@ -51,7 +51,7 @@ func availableProviders(t *testing.T) []providerTestCase {
5151
providers = append(providers, providerTestCase{
5252
name: "gemini-vertex",
5353
provider: gemini.New("", gemini.WithVertexAI(project, region), gemini.WithTokenSource(provider.GcloudTokenSource(account))),
54-
model: "gemini-2.0-flash",
54+
model: "gemini-2.5-flash",
5555
})
5656
}
5757

provider/gemini/gemini_test.go

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ func TestChat_TextResponse(t *testing.T) {
2929

3030
p := New("test-key", WithBaseURL(server.URL))
3131
resp, err := p.Chat(context.Background(), provider.ChatRequest{
32-
Model: "gemini-2.0-flash",
32+
Model: "gemini-2.5-flash",
3333
Messages: []provider.Message{provider.NewTextMessage(provider.RoleUser, "Hello")},
3434
})
3535
if err != nil {
@@ -67,7 +67,7 @@ func TestChat_ToolUse(t *testing.T) {
6767

6868
p := New("test-key", WithBaseURL(server.URL))
6969
resp, err := p.Chat(context.Background(), provider.ChatRequest{
70-
Model: "gemini-2.0-flash",
70+
Model: "gemini-2.5-flash",
7171
Messages: []provider.Message{provider.NewTextMessage(provider.RoleUser, "run echo hi")},
7272
})
7373
if err != nil {
@@ -96,7 +96,7 @@ func TestChat_ErrorResponse(t *testing.T) {
9696

9797
p := New("bad-key", WithBaseURL(server.URL))
9898
_, err := p.Chat(context.Background(), provider.ChatRequest{
99-
Model: "gemini-2.0-flash",
99+
Model: "gemini-2.5-flash",
100100
Messages: []provider.Message{provider.NewTextMessage(provider.RoleUser, "Hi")},
101101
})
102102
if err == nil {
@@ -124,7 +124,7 @@ func TestStreamChat_TextDelta(t *testing.T) {
124124

125125
p := New("test-key", WithBaseURL(server.URL))
126126
ch, err := p.StreamChat(context.Background(), provider.ChatRequest{
127-
Model: "gemini-2.0-flash",
127+
Model: "gemini-2.5-flash",
128128
Messages: []provider.Message{provider.NewTextMessage(provider.RoleUser, "Hi")},
129129
})
130130
if err != nil {
@@ -150,9 +150,9 @@ func TestGeminiIntegration(t *testing.T) {
150150

151151
p := New(apiKey)
152152
resp, err := p.Chat(context.Background(), provider.ChatRequest{
153-
Model: "gemini-2.0-flash",
153+
Model: "gemini-2.5-flash",
154154
Messages: []provider.Message{provider.NewTextMessage(provider.RoleUser, "Say 'hello' and nothing else.")},
155-
MaxTokens: 32,
155+
MaxTokens: 256,
156156
})
157157
if err != nil {
158158
t.Fatalf("Chat: %v", err)
@@ -178,9 +178,9 @@ func TestGeminiVertexAIIntegration(t *testing.T) {
178178

179179
p := New("", WithVertexAI(project, region), WithTokenSource(provider.GcloudTokenSource(account)))
180180
resp, err := p.Chat(context.Background(), provider.ChatRequest{
181-
Model: "gemini-2.0-flash",
181+
Model: "gemini-2.5-flash",
182182
Messages: []provider.Message{provider.NewTextMessage(provider.RoleUser, "Say 'hello' and nothing else.")},
183-
MaxTokens: 32,
183+
MaxTokens: 256,
184184
})
185185
if err != nil {
186186
t.Fatalf("Chat: %v", err)

0 commit comments

Comments (0)