diff --git a/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj
index ae11f145bdc..8475494e76e 100644
--- a/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj
index c0b7c08d24e..bf2403af903 100644
--- a/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj
index a4fc72d7ec6..e505af96607 100644
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj
index 3c6145ac7b8..5752f31c880 100644
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj
index 3c6145ac7b8..5752f31c880 100644
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
index 3c6145ac7b8..5752f31c880 100644
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj
index 0c7f2907c39..7f752984648 100644
--- a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj
index 4ae82a1a6aa..e4e6c3bb250 100644
--- a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj
index 4ae82a1a6aa..e4e6c3bb250 100644
--- a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
index 245bcb22734..0daad35f8b9 100644
--- a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj
index f4f137f8d60..402f8c682cb 100644
--- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj
index f4f137f8d60..402f8c682cb 100644
--- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj
index f4f137f8d60..402f8c682cb 100644
--- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj
index f4f137f8d60..402f8c682cb 100644
--- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj
index f4f137f8d60..402f8c682cb 100644
--- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj
index f4f137f8d60..402f8c682cb 100644
--- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj
index 0a9d0111985..f91317c7fa6 100644
--- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj
index 0a9d0111985..f91317c7fa6 100644
--- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama2_InvokeModel/Llama2_InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama2_InvokeModel/Llama2_InvokeModel.csproj
index 0a9d0111985..f91317c7fa6 100644
--- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama2_InvokeModel/Llama2_InvokeModel.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama2_InvokeModel/Llama2_InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama2_InvokeModelWithResponseStream/Llama2_InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama2_InvokeModelWithResponseStream/Llama2_InvokeModelWithResponseStream.csproj
index 0a9d0111985..f91317c7fa6 100644
--- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama2_InvokeModelWithResponseStream/Llama2_InvokeModelWithResponseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama2_InvokeModelWithResponseStream/Llama2_InvokeModelWithResponseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/InvokeModel.cs b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/InvokeModel.cs
index 9b876164f1c..50d6550e72c 100644
--- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/InvokeModel.cs
+++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/InvokeModel.cs
@@ -13,18 +13,17 @@
 using Amazon.BedrockRuntime.Model;
 
 // Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USWest2);
 
-// Set the model ID, e.g., Llama 3 8b Instruct.
-var modelId = "meta.llama3-8b-instruct-v1:0";
+// Set the model ID, e.g., Llama 3 70b Instruct.
+var modelId = "meta.llama3-70b-instruct-v1:0";
 
 // Define the prompt for the model.
 var prompt = "Describe the purpose of a 'hello world' program in one line.";
 
 // Embed the prompt in Llama 2's instruction format.
 var formattedPrompt = $@"
-<|begin_of_text|>
-<|start_header_id|>user<|end_header_id|>
+<|begin_of_text|><|start_header_id|>user<|end_header_id|>
 {prompt}
 <|eot_id|>
 <|start_header_id|>assistant<|end_header_id|>
diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj
index 0a9d0111985..f91317c7fa6 100644
--- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
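The prompt-format fix in the hunk above removes the stray newline between `<|begin_of_text|>` and the first `<|start_header_id|>` tag, so the two special tokens sit adjacent, as in Meta's published Llama 3 chat template. A minimal Python sketch of the corrected template shape (the helper name `format_llama3_prompt` is hypothetical, not part of this change):

```python
def format_llama3_prompt(user_message: str) -> str:
    """Return a prompt in Llama 3's instruction format.

    Sketch only: mirrors the "+" lines above; <|begin_of_text|> and the
    first header tag are adjacent, with no newline between them.
    """
    return (
        f"<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n"
        f"{user_message}\n"
        f"<|eot_id|>\n"
        f"<|start_header_id|>assistant<|end_header_id|>\n"
    )


print(format_llama3_prompt("Describe the purpose of a 'hello world' program in one line."))
```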
diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
index cb376732cf4..e3c85e2dffb 100644
--- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
+++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
@@ -14,18 +14,17 @@
 using Amazon.BedrockRuntime.Model;
 
 // Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USWest2);
 
-// Set the model ID, e.g., Llama 3 8b Instruct.
-var modelId = "meta.llama3-8b-instruct-v1:0";
+// Set the model ID, e.g., Llama 3 70b Instruct.
+var modelId = "meta.llama3-70b-instruct-v1:0";
 
 // Define the prompt for the model.
 var prompt = "Describe the purpose of a 'hello world' program in one line.";
 
 // Embed the prompt in Llama 2's instruction format.
 var formattedPrompt = $@"
-<|begin_of_text|>
-<|start_header_id|>user<|end_header_id|>
+<|begin_of_text|><|start_header_id|>user<|end_header_id|>
 {prompt}
 <|eot_id|>
 <|start_header_id|>assistant<|end_header_id|>
diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj
index 0a9d0111985..f91317c7fa6 100644
--- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj
index a84aacecb44..27e936ccbc6 100644
--- a/dotnetv3/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj
index a28f64365c1..8297baab449 100644
--- a/dotnetv3/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj
index a28f64365c1..8297baab449 100644
--- a/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
index a28f64365c1..8297baab449 100644
--- a/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
+++ b/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj b/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj
index eb69cc3454e..71853be681c 100644
--- a/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj
+++ b/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj
@@ -11,12 +11,12 @@
-
-
-
-
+
+
+
+
-
+
       runtime; build; native; contentfiles; analyzers; buildtransitive
       all
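For context on what the streaming examples above configure: a minimal boto3 sketch of the same call, assuming Meta Llama's native payload fields (`prompt`, `max_gen_len`, `temperature`) and using the region and model ID set by this change; the inference values are illustrative, not taken from the diff.

```python
import json

import boto3

# Region and model ID as set in the diffs above.
client = boto3.client("bedrock-runtime", region_name="us-west-2")
model_id = "meta.llama3-70b-instruct-v1:0"

# Native request payload for Meta Llama models (values are illustrative).
request = json.dumps({
    "prompt": (
        "<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n"
        "Describe the purpose of a 'hello world' program in one line.\n"
        "<|eot_id|>\n"
        "<|start_header_id|>assistant<|end_header_id|>\n"
    ),
    "max_gen_len": 512,
    "temperature": 0.5,
})

# Invoke the model with a streaming response and print fragments as they arrive.
streaming_response = client.invoke_model_with_response_stream(
    modelId=model_id, body=request
)
for event in streaming_response["body"]:
    chunk = json.loads(event["chunk"]["bytes"])
    if "generation" in chunk:
        print(chunk["generation"], end="")
```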
diff --git a/javascriptv3/example_code/bedrock-runtime/models/metaLlama/llama3/invoke_model_quickstart.js b/javascriptv3/example_code/bedrock-runtime/models/metaLlama/llama3/invoke_model_quickstart.js
index 5b5abc3a840..db6df81bcc9 100644
--- a/javascriptv3/example_code/bedrock-runtime/models/metaLlama/llama3/invoke_model_quickstart.js
+++ b/javascriptv3/example_code/bedrock-runtime/models/metaLlama/llama3/invoke_model_quickstart.js
@@ -12,8 +12,8 @@ import {
 // Create a Bedrock Runtime client in the AWS Region of your choice.
 const client = new BedrockRuntimeClient({ region: "us-west-2" });
 
-// Set the model ID, e.g., Llama 3 8B Instruct.
-const modelId = "meta.llama3-8b-instruct-v1:0";
+// Set the model ID, e.g., Llama 3 70B Instruct.
+const modelId = "meta.llama3-70b-instruct-v1:0";
 
 // Define the user message to send.
 const userMessage =
@@ -21,8 +21,7 @@ const userMessage =
 // Embed the message in Llama 3's prompt format.
 const prompt = `
-<|begin_of_text|>
-<|start_header_id|>user<|end_header_id|>
+<|begin_of_text|><|start_header_id|>user<|end_header_id|>
 ${userMessage}
 <|eot_id|>
 <|start_header_id|>assistant<|end_header_id|>
diff --git a/javascriptv3/example_code/bedrock-runtime/models/metaLlama/llama3/invoke_model_with_response_stream_quickstart.js b/javascriptv3/example_code/bedrock-runtime/models/metaLlama/llama3/invoke_model_with_response_stream_quickstart.js
index daa55bb7c21..4c26fb036e9 100644
--- a/javascriptv3/example_code/bedrock-runtime/models/metaLlama/llama3/invoke_model_with_response_stream_quickstart.js
+++ b/javascriptv3/example_code/bedrock-runtime/models/metaLlama/llama3/invoke_model_with_response_stream_quickstart.js
@@ -12,8 +12,8 @@ import {
 // Create a Bedrock Runtime client in the AWS Region of your choice.
 const client = new BedrockRuntimeClient({ region: "us-west-2" });
 
-// Set the model ID, e.g., Llama 3 8B Instruct.
-const modelId = "meta.llama3-8b-instruct-v1:0";
+// Set the model ID, e.g., Llama 3 70B Instruct.
+const modelId = "meta.llama3-70b-instruct-v1:0";
 
 // Define the user message to send.
 const userMessage =
@@ -21,8 +21,7 @@ const userMessage =
 // Embed the message in Llama 3's prompt format.
 const prompt = `
-<|begin_of_text|>
-<|start_header_id|>user<|end_header_id|>
+<|begin_of_text|><|start_header_id|>user<|end_header_id|>
 ${userMessage}
 <|eot_id|>
 <|start_header_id|>assistant<|end_header_id|>
diff --git a/javascriptv3/example_code/bedrock-runtime/package.json b/javascriptv3/example_code/bedrock-runtime/package.json
index c75812840bb..25e81ad8de2 100644
--- a/javascriptv3/example_code/bedrock-runtime/package.json
+++ b/javascriptv3/example_code/bedrock-runtime/package.json
@@ -11,6 +11,6 @@
     "vitest": "^1.6.0"
   },
   "dependencies": {
-    "@aws-sdk/client-bedrock-runtime": "^3.587.0"
+    "@aws-sdk/client-bedrock-runtime": "^3.658.1"
   }
 }
diff --git a/javav2/example_code/bedrock-runtime/pom.xml b/javav2/example_code/bedrock-runtime/pom.xml
index 578285e8f6b..353bd77a0c7 100644
--- a/javav2/example_code/bedrock-runtime/pom.xml
+++ b/javav2/example_code/bedrock-runtime/pom.xml
@@ -30,7 +30,7 @@
         <dependency>
             <groupId>software.amazon.awssdk</groupId>
             <artifactId>bom</artifactId>
-            <version>2.25.67</version>
+            <version>2.28.10</version>
             <type>pom</type>
             <scope>import</scope>
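The two dependency bumps above pin `@aws-sdk/client-bedrock-runtime` at ^3.658.1 and the AWS SDK for Java BOM at 2.28.10. A comparable floor check for a Python environment, assuming as the minimum the boto3 version pinned later in this change set's requirements.txt (1.35.28), might look like this sketch:

```python
from importlib.metadata import version

# Assumption: 1.35.28 is used as the floor only because it is the boto3
# version pinned in this change set's requirements.txt.
MIN_BOTO3 = (1, 35, 28)

installed = tuple(int(part) for part in version("boto3").split(".")[:3])
if installed < MIN_BOTO3:
    raise RuntimeError(
        f"boto3 {'.'.join(map(str, installed))} is older than "
        f"{'.'.join(map(str, MIN_BOTO3))}; run: pip install -r requirements.txt"
    )
print("boto3 version OK:", version("boto3"))
```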
diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/metaLlama/Llama3_InvokeModel.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/metaLlama/Llama3_InvokeModel.java
index f1cade1a6e1..a50e3433636 100644
--- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/metaLlama/Llama3_InvokeModel.java
+++ b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/metaLlama/Llama3_InvokeModel.java
@@ -22,11 +22,11 @@ public static String invokeModel() {
         // Create a Bedrock Runtime client in the AWS Region you want to use.
         // Replace the DefaultCredentialsProvider with your preferred credentials provider.
         var client = BedrockRuntimeClient.builder()
                 .credentialsProvider(DefaultCredentialsProvider.create())
-                .region(Region.US_EAST_1)
+                .region(Region.US_WEST_2)
                 .build();
 
-        // Set the model ID, e.g., Llama 3 8b Instruct.
-        var modelId = "meta.llama3-8b-instruct-v1:0";
+        // Set the model ID, e.g., Llama 3 70b Instruct.
+        var modelId = "meta.llama3-70b-instruct-v1:0";
 
         // The InvokeModel API uses the model's native payload.
         // Learn more about the available inference parameters and response fields at:
@@ -38,8 +38,7 @@ public static String invokeModel() {
         // Embed the prompt in Llama 3's instruction format.
         var instruction = (
-                "<|begin_of_text|>\n" +
-                "<|start_header_id|>user<|end_header_id|>\n" +
+                "<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n" +
                 "{{prompt}} <|eot_id|>\n" +
                 "<|start_header_id|>assistant<|end_header_id|>\n"
         ).replace("{{prompt}}", prompt);
diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/metaLlama/Llama3_InvokeModelWithResponseStream.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/metaLlama/Llama3_InvokeModelWithResponseStream.java
index a5d16c3e9f7..e0b6bf5ce87 100644
--- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/metaLlama/Llama3_InvokeModelWithResponseStream.java
+++ b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/metaLlama/Llama3_InvokeModelWithResponseStream.java
@@ -22,17 +22,17 @@
 public class Llama3_InvokeModelWithResponseStream {
 
-    public static String invokeModelWithResponseStream() throws ExecutionException, InterruptedException {
+    public static String invokeModelWithResponseStream() {
 
         // Create a Bedrock Runtime client in the AWS Region you want to use.
         // Replace the DefaultCredentialsProvider with your preferred credentials provider.
         var client = BedrockRuntimeAsyncClient.builder()
                 .credentialsProvider(DefaultCredentialsProvider.create())
-                .region(Region.US_EAST_1)
+                .region(Region.US_WEST_2)
                 .build();
 
-        // Set the model ID, e.g., Llama 3 8b Instruct.
-        var modelId = "meta.llama3-8b-instruct-v1:0";
+        // Set the model ID, e.g., Llama 3 70b Instruct.
+        var modelId = "meta.llama3-70b-instruct-v1:0";
 
         // The InvokeModelWithResponseStream API uses the model's native payload.
         // Learn more about the available inference parameters and response fields at:
@@ -44,8 +44,7 @@ public static String invokeModelWithResponseStream() throws ExecutionException,
         // Embed the prompt in Llama 3's instruction format.
         var instruction = (
-                "<|begin_of_text|>\n" +
-                "<|start_header_id|>user<|end_header_id|>\n" +
+                "<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n" +
                 "{{prompt}} <|eot_id|>\n" +
                 "<|start_header_id|>assistant<|end_header_id|>\n"
         ).replace("{{prompt}}", prompt);
diff --git a/python/example_code/bedrock-runtime/models/meta_llama/llama3_invoke_model.py b/python/example_code/bedrock-runtime/models/meta_llama/llama3_invoke_model.py
index 86c5f996243..ae9022394a3 100644
--- a/python/example_code/bedrock-runtime/models/meta_llama/llama3_invoke_model.py
+++ b/python/example_code/bedrock-runtime/models/meta_llama/llama3_invoke_model.py
@@ -10,18 +10,17 @@
 from botocore.exceptions import ClientError
 
 # Create a Bedrock Runtime client in the AWS Region of your choice.
-client = boto3.client("bedrock-runtime", region_name="us-east-1")
+client = boto3.client("bedrock-runtime", region_name="us-west-2")
 
-# Set the model ID, e.g., Llama 3 8b Instruct.
-model_id = "meta.llama3-8b-instruct-v1:0"
+# Set the model ID, e.g., Llama 3 70b Instruct.
+model_id = "meta.llama3-70b-instruct-v1:0"
 
 # Define the prompt for the model.
 prompt = "Describe the purpose of a 'hello world' program in one line."
 
 # Embed the prompt in Llama 3's instruction format.
 formatted_prompt = f"""
-<|begin_of_text|>
-<|start_header_id|>user<|end_header_id|>
+<|begin_of_text|><|start_header_id|>user<|end_header_id|>
 {prompt}
 <|eot_id|>
 <|start_header_id|>assistant<|end_header_id|>
diff --git a/python/example_code/bedrock-runtime/models/meta_llama/llama3_invoke_model_with_response_stream.py b/python/example_code/bedrock-runtime/models/meta_llama/llama3_invoke_model_with_response_stream.py
index 5a7eb34cbba..d2089825f0a 100644
--- a/python/example_code/bedrock-runtime/models/meta_llama/llama3_invoke_model_with_response_stream.py
+++ b/python/example_code/bedrock-runtime/models/meta_llama/llama3_invoke_model_with_response_stream.py
@@ -11,18 +11,17 @@
 from botocore.exceptions import ClientError
 
 # Create a Bedrock Runtime client in the AWS Region of your choice.
-client = boto3.client("bedrock-runtime", region_name="us-east-1")
+client = boto3.client("bedrock-runtime", region_name="us-west-2")
 
-# Set the model ID, e.g., Llama 3 8b Instruct.
-model_id = "meta.llama3-8b-instruct-v1:0"
+# Set the model ID, e.g., Llama 3 70b Instruct.
+model_id = "meta.llama3-70b-instruct-v1:0"
 
 # Define the prompt for the model.
 prompt = "Describe the purpose of a 'hello world' program in one line."
 
 # Embed the prompt in Llama 3's instruction format.
 formatted_prompt = f"""
-<|begin_of_text|>
-<|start_header_id|>user<|end_header_id|>
+<|begin_of_text|><|start_header_id|>user<|end_header_id|>
 {prompt}
 <|eot_id|>
 <|start_header_id|>assistant<|end_header_id|>
diff --git a/python/example_code/bedrock-runtime/requirements.txt b/python/example_code/bedrock-runtime/requirements.txt
index 867b6154895..e084f5d893a 100644
--- a/python/example_code/bedrock-runtime/requirements.txt
+++ b/python/example_code/bedrock-runtime/requirements.txt
@@ -1,34 +1,35 @@
 beautifulsoup4==4.12.3
-boto3==1.34.127
-botocore==1.34.127
-certifi==2024.6.2
+boto3==1.35.28
+botocore==1.35.28
+certifi==2024.8.30
 charset-normalizer==3.3.2
 colorama==0.4.6
-contourpy==1.2.1
+contourpy==1.3.0
 cycler==0.12.1
-fonttools==4.53.0
+fonttools==4.54.1
 geojson==3.1.0
-idna==3.7
+idna==3.10
 iniconfig==2.0.0
 jmespath==1.0.1
-kiwisolver==1.4.5
-lxml==5.2.2
-matplotlib==3.9.0
-numpy==1.26.4
+kiwisolver==1.4.7
+lxml==5.3.0
+matplotlib==3.9.2
+numpy==2.1.1
 packaging==24.1
-pandas==2.2.2
-pillow==10.3.0
+pandas==2.2.3
+pillow==10.4.0
+pip-review==1.3.0
 pluggy==1.5.0
-pyparsing==3.1.2
-pytest==8.2.2
-pytest-asyncio==0.23.7
+pyparsing==3.1.4
+pytest==8.3.3
+pytest-asyncio==0.24.0
 python-dateutil==2.9.0.post0
-pytz==2024.1
+pytz==2024.2
 requests==2.32.3
-s3transfer==0.10.1
+s3transfer==0.10.2
 six==1.16.0
-soupsieve==2.5
-tzdata==2024.1
+soupsieve==2.6
+tzdata==2024.2
 ujson==5.10.0
-urllib3==2.2.1
-xarray==2024.5.0
+urllib3==2.2.3
+xarray==2024.9.0
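The Python hunks above end before the response handling, which this change leaves untouched. For completeness, a minimal non-streaming sketch of the full call these files demonstrate, assuming Meta Llama's native request fields (`prompt`, `max_gen_len`, `temperature`) and response field (`generation`); the inference values are illustrative, not taken from the diff.

```python
import json

import boto3

# Region and model ID as set in the diffs above.
client = boto3.client("bedrock-runtime", region_name="us-west-2")
model_id = "meta.llama3-70b-instruct-v1:0"

# Embed the prompt in Llama 3's instruction format (the corrected template).
prompt = "Describe the purpose of a 'hello world' program in one line."
formatted_prompt = (
    f"<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n"
    f"{prompt}\n"
    f"<|eot_id|>\n"
    f"<|start_header_id|>assistant<|end_header_id|>\n"
)

# Native request payload for Meta Llama models (values are illustrative).
request = json.dumps({
    "prompt": formatted_prompt,
    "max_gen_len": 512,
    "temperature": 0.5,
})

response = client.invoke_model(modelId=model_id, body=request)

# Meta Llama models return the completion in the "generation" field.
model_response = json.loads(response["body"].read())
print(model_response["generation"])
```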