File tree: 4 files changed, +4 −4 lines changed
Original file line number Diff line number Diff line change 27
27
)
28
28
29
29
model = ChatGoogleGenerativeAI (
30
- model="gemini-2.5-pro-preview-03-25", google_api_key=os.getenv("GEMINI_API_KEY")
30
+ model="gemini-2.5-pro-preview-06-05", google_api_key=os.getenv("GEMINI_API_KEY")
31
31
)
32
32
33
33
Original file line number Diff line number Diff line change 22
22
"stdio" ,
23
23
],
24
24
)
25
- agent = Agent("gemini-2.5-pro-preview-03-25", mcp_servers=[server])
25
+ agent = Agent("gemini-2.5-pro-preview-06-05", mcp_servers=[server])
26
26
Agent .instrument_all ()
27
27
28
28
Original file line number Diff line number Diff line change @@ -195,7 +195,7 @@ uv run agents_mcp_usage/multi_mcp/eval_multi_mcp/run_multi_evals.py \
195
195
196
196
- **`--models`** - Comma-separated list of models to evaluate
197
197
- **`--runs`** - Number of evaluation runs per model (default: 3)
198
- - **`--judge-model`** - Model for LLM judging (default: gemini-2.5-pro-preview-03-25)
198
+ - **`--judge-model`** - Model for LLM judging (default: gemini-2.5-pro-preview-06-05)
199
199
- **`--parallel`** - Run evaluations in parallel (default: true)
200
200
- **`--sequential`** - Force sequential execution
201
201
- **`--timeout`** - Timeout in seconds per evaluation run (default: 600)
Original file line number Diff line number Diff line change 35
35
)
36
36
# Create Agent with MCP servers
37
37
agent = Agent (
38
- "gemini-2.5-pro-preview-05-06",
38
+ "gemini-2.5-pro-preview-06-05",
39
39
# "openai:o4-mini",
40
40
mcp_servers = [local_server , mermaid_server ],
41
41
)
You can’t perform that action at this time.
0 commit comments