Skip to content

Commit 2e7b632

Browse files
authored
Update legacy models across non-legacy cookbooks
1 parent 260f809 commit 2e7b632

File tree

100 files changed

+194
-231
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

100 files changed

+194
-231
lines changed

capabilities/classification/evaluation/promptfooconfig.yaml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,27 +7,27 @@ prompts:
77
- prompts.py:rag_chain_of_thought_classify
88

99
providers:
10-
- id: anthropic:messages:claude-3-haiku-20240307
10+
- id: anthropic:messages:claude-haiku-4-5
1111
label: "Haiku: T-0.0"
1212
config:
1313
max_tokens: 4096
1414
temperature: 0
15-
- id: anthropic:messages:claude-3-haiku-20240307
15+
- id: anthropic:messages:claude-haiku-4-5
1616
label: "Haiku: T-0.2"
1717
config:
1818
max_tokens: 4096
1919
temperature: 0.2
20-
- id: anthropic:messages:claude-3-haiku-20240307
20+
- id: anthropic:messages:claude-haiku-4-5
2121
label: "Haiku: T-0.4"
2222
config:
2323
max_tokens: 4096
2424
temperature: 0.4
25-
- id: anthropic:messages:claude-3-haiku-20240307
25+
- id: anthropic:messages:claude-haiku-4-5
2626
label: "Haiku: T-0.6"
2727
config:
2828
max_tokens: 4096
2929
temperature: 0.6
30-
- id: anthropic:messages:claude-3-haiku-20240307
30+
- id: anthropic:messages:claude-haiku-4-5
3131
label: "Haiku: T-0.8"
3232
config:
3333
max_tokens: 4096

capabilities/classification/guide.ipynb

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -490,7 +490,7 @@
490490
" stop_sequences=[\"</category>\"], \n",
491491
" max_tokens=4096, \n",
492492
" temperature=0.0,\n",
493-
" model=\"claude-3-haiku-20240307\"\n",
493+
" model=\"claude-haiku-4-5\"\n",
494494
" )\n",
495495
" \n",
496496
" # Extract the result from the response\n",
@@ -734,7 +734,7 @@
734734
" stop_sequences=[\"</category>\"], \n",
735735
" max_tokens=4096, \n",
736736
" temperature=0.0,\n",
737-
" model=\"claude-3-haiku-20240307\"\n",
737+
" model=\"claude-haiku-4-5\"\n",
738738
" )\n",
739739
" \n",
740740
" # Extract the result from the response\n",
@@ -846,7 +846,7 @@
846846
" stop_sequences=[\"</category>\"], \n",
847847
" max_tokens=4096, \n",
848848
" temperature=0.0,\n",
849-
" model=\"claude-3-haiku-20240307\"\n",
849+
" model=\"claude-haiku-4-5\"\n",
850850
" )\n",
851851
" \n",
852852
" # Extract the result from the response\n",

capabilities/contextual-embeddings/guide.ipynb

Lines changed: 2 additions & 2 deletions
Large diffs are not rendered by default.

capabilities/retrieval_augmented_generation/evaluation/eval_end_to_end.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ def evaluate_end_to_end(query, generated_answer, correct_answer):
3535
client = Anthropic(api_key=os.environ.get('ANTHROPIC_API_KEY'))
3636
try:
3737
response = client.messages.create(
38-
model="claude-3-5-sonnet-20241022",
38+
model="claude-sonnet-4-5",
3939
max_tokens=1500,
4040
messages=[
4141
{"role": "user", "content": prompt},

capabilities/retrieval_augmented_generation/evaluation/promptfooconfig_end_to_end.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,13 @@ prompts:
77
- prompts.py:answer_query_level_three
88

99
providers:
10-
- id: anthropic:messages:claude-3-haiku-20240307
10+
- id: anthropic:messages:claude-haiku-4-5
1111
label: "Haiku: T-0.0"
1212
config:
1313
max_tokens: 2500
1414
temperature: 0
1515

16-
- id: anthropic:messages:claude-3-5-sonnet-20241022
16+
- id: anthropic:messages:claude-sonnet-4-5
1717
label: "Sonnet 4.5: T-0.0"
1818
config:
1919
max_tokens: 2500

capabilities/retrieval_augmented_generation/evaluation/prompts.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ def _rerank_results(query: str, results: List[Dict], k: int = 5) -> List[Dict]:
106106
"""
107107
try:
108108
response = client.messages.create(
109-
model="claude-3-haiku-20240307",
109+
model="claude-haiku-4-5",
110110
max_tokens=50,
111111
messages=[{"role": "user", "content": prompt}, {"role": "assistant", "content": "<relevant_indices>"}],
112112
temperature=0,

capabilities/retrieval_augmented_generation/evaluation/provider_retrieval.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ def _rerank_results(query: str, results: List[Dict], k: int = 3) -> List[Dict]:
6767
client = Anthropic(api_key=os.environ.get('ANTHROPIC_API_KEY'))
6868
try:
6969
response = client.messages.create(
70-
model="claude-3-5-sonnet-20241022",
70+
model="claude-sonnet-4-5",
7171
max_tokens=50,
7272
messages=[{"role": "user", "content": prompt}, {"role": "assistant", "content": "<relevant_indices>"}],
7373
temperature=0,

capabilities/retrieval_augmented_generation/guide.ipynb

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -360,7 +360,7 @@
360360
" Answer the question now, and avoid providing preamble such as 'Here is the answer', etc\n",
361361
" \"\"\"\n",
362362
" response = client.messages.create(\n",
363-
" model=\"claude-3-haiku-20240307\",\n",
363+
" model=\"claude-haiku-4-5\",\n",
364364
" max_tokens=2500,\n",
365365
" messages=[\n",
366366
" {\"role\": \"user\", \"content\": prompt}\n",
@@ -643,7 +643,7 @@
643643
" \n",
644644
" try:\n",
645645
" response = client.messages.create(\n",
646-
" model=\"claude-3-5-sonnet-20241022\",\n",
646+
" model=\"claude-sonnet-4-5\",\n",
647647
" max_tokens=1500,\n",
648648
" messages=[\n",
649649
" {\"role\": \"user\", \"content\": prompt},\n",
@@ -3264,7 +3264,7 @@
32643264
" \"\"\"\n",
32653265
"\n",
32663266
" response = client.messages.create(\n",
3267-
" model=\"claude-3-haiku-20240307\",\n",
3267+
" model=\"claude-haiku-4-5\",\n",
32683268
" max_tokens=150,\n",
32693269
" messages=[\n",
32703270
" {\"role\": \"user\", \"content\": prompt}\n",
@@ -3461,7 +3461,7 @@
34613461
" Answer the question now, and avoid providing preamble such as 'Here is the answer', etc\n",
34623462
" \"\"\"\n",
34633463
" response = client.messages.create(\n",
3464-
" model=\"claude-3-haiku-20240307\",\n",
3464+
" model=\"claude-haiku-4-5\",\n",
34653465
" max_tokens=2500,\n",
34663466
" messages=[\n",
34673467
" {\"role\": \"user\", \"content\": prompt}\n",
@@ -5975,7 +5975,7 @@
59755975
" \"\"\"\n",
59765976
" try:\n",
59775977
" response = client.messages.create(\n",
5978-
" model=\"claude-3-haiku-20240307\",\n",
5978+
" model=\"claude-haiku-4-5\",\n",
59795979
" max_tokens=50,\n",
59805980
" messages=[{\"role\": \"user\", \"content\": prompt}, {\"role\": \"assistant\", \"content\": \"<relevant_indices>\"}],\n",
59815981
" temperature=0,\n",
@@ -6044,7 +6044,7 @@
60446044
" Answer the question now, and avoid providing preamble such as 'Here is the answer', etc\n",
60456045
" \"\"\"\n",
60466046
" response = client.messages.create(\n",
6047-
" model=\"claude-3-haiku-20240307\",\n",
6047+
" model=\"claude-haiku-4-5\",\n",
60486048
" max_tokens=2500,\n",
60496049
" messages=[{\"role\": \"user\", \"content\": prompt}],\n",
60506050
" temperature=0\n",

capabilities/summarization/evaluation/custom_evals/llm_eval.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ def llm_eval(summary, input):
5353
Evaluation (JSON format):"""
5454

5555
response = client.messages.create(
56-
model="claude-3-5-sonnet-20241022",
56+
model="claude-sonnet-4-5",
5757
max_tokens=1000,
5858
temperature=0,
5959
messages=[

capabilities/summarization/evaluation/promptfooconfig.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,12 +6,12 @@ prompts:
66
- prompts.py:summarize_long_document
77

88
providers:
9-
- id: anthropic:messages:claude-3-haiku-20240307
9+
- id: anthropic:messages:claude-haiku-4-5
1010
label: "Haiku 4.5"
1111
config:
1212
max_tokens: 4096
1313
temperature: 0
14-
- id: anthropic:messages:claude-3-5-sonnet-20241022
14+
- id: anthropic:messages:claude-sonnet-4-5
1515
label: "Sonnet 4.5"
1616
config:
1717
max_tokens: 4096

0 commit comments

Comments
 (0)