diff --git a/.doc_gen/metadata/bedrock-runtime_metadata.yaml b/.doc_gen/metadata/bedrock-runtime_metadata.yaml
index f867b82cec4..761fe7b7c9b 100644
--- a/.doc_gen/metadata/bedrock-runtime_metadata.yaml
+++ b/.doc_gen/metadata/bedrock-runtime_metadata.yaml
@@ -1483,3 +1483,20 @@ bedrock-runtime_Scenario_GenerateVideos_NovaReel:
                 - bedrock-runtime.java2.NovaReel.VideoGeneration
   services:
     bedrock-runtime: {StartAsyncInvoke, GetAsyncInvoke}
+
+bedrock-runtime_InvokeModel_TitanText:
+  title: Invoke Amazon Titan Text models on &BR; using the Invoke Model API
+  title_abbrev: "InvokeModel"
+  synopsis: send a text message to Amazon Titan Text, using the Invoke Model API.
+  category: Amazon Titan Text
+  languages:
+    Python:
+      versions:
+        - sdk_version: 3
+          github: python/example_code/bedrock-runtime
+          excerpts:
+            - description: Use the Invoke Model API to send a text message.
+              snippet_tags:
+                - python.example_code.bedrock-runtime.InvokeModel_TitanText
+  services:
+    bedrock-runtime: {InvokeModel}
diff --git a/python/example_code/bedrock-runtime/README.md b/python/example_code/bedrock-runtime/README.md
index 7a74274ab38..f93adb8cf0a 100644
--- a/python/example_code/bedrock-runtime/README.md
+++ b/python/example_code/bedrock-runtime/README.md
@@ -64,6 +64,10 @@ functions within the same service.
 
 - [InvokeModel](models/amazon_titan_image_generator/invoke_model.py#L4)
 
+### Amazon Titan Text
+
+- [InvokeModel](models/amazon_titan_text/invoke_model.py#L4)
+
 ### Amazon Titan Text Embeddings
 
 - [InvokeModel](models/amazon_titan_text_embeddings/invoke_model.py#L4)
diff --git a/python/example_code/bedrock-runtime/models/amazon_titan_text/invoke_model.py b/python/example_code/bedrock-runtime/models/amazon_titan_text/invoke_model.py
new file mode 100644
index 00000000000..88c5b009524
--- /dev/null
+++ b/python/example_code/bedrock-runtime/models/amazon_titan_text/invoke_model.py
@@ -0,0 +1,48 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+
+# snippet-start:[python.example_code.bedrock-runtime.InvokeModel_TitanText]
+# Use the native inference API to send a text message to Amazon Titan Text.
+
+import boto3
+import json
+
+from botocore.exceptions import ClientError
+
+# Create a Bedrock Runtime client in the AWS Region of your choice.
+client = boto3.client("bedrock-runtime", region_name="us-east-1")
+
+# Set the model ID, e.g., Titan Text Premier.
+model_id = "amazon.titan-text-premier-v1:0"
+
+# Define the prompt for the model.
+prompt = "Describe the purpose of a 'hello world' program in one line."
+
+# Format the request payload using the model's native structure.
+native_request = {
+    "inputText": prompt,
+    "textGenerationConfig": {
+        "maxTokenCount": 512,
+        "temperature": 0.5,
+    },
+}
+
+# Convert the native request to JSON.
+request = json.dumps(native_request)
+
+try:
+    # Invoke the model with the request.
+    response = client.invoke_model(modelId=model_id, body=request)
+
+except (ClientError, Exception) as e:
+    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
+    exit(1)
+
+# Decode the response body.
+model_response = json.loads(response["body"].read())
+
+# Extract and print the response text.
+response_text = model_response["results"][0]["outputText"]
+print(response_text)
+
+# snippet-end:[python.example_code.bedrock-runtime.InvokeModel_TitanText]
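
Note on the new example: the decoded response body parsed at the end of invoke_model.py follows the Titan Text native response schema, which carries token counts and a completion reason alongside the generated text. A minimal, self-contained sketch of reading those extra fields is below; the sample payload is illustrative, not actual model output, and the field names are assumed from that schema.

# Illustrative Titan Text response body; values are made up for demonstration.
sample_response = {
    "inputTextTokenCount": 12,
    "results": [
        {
            "tokenCount": 18,
            "outputText": "A 'hello world' program verifies that a toolchain can build and run code.",
            "completionReason": "FINISH",
        }
    ],
}

# Read the generated text plus the accompanying usage metadata.
result = sample_response["results"][0]
print(result["outputText"])
print(
    f"Input tokens: {sample_response['inputTextTokenCount']}, "
    f"output tokens: {result['tokenCount']}, "
    f"completion reason: {result['completionReason']}"
)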