diff --git a/applications/feedback_sentiment_analyzer/SPECIFICATION.md b/applications/feedback_sentiment_analyzer/SPECIFICATION.md
index eed56cba22d..ef0ea8c7276 100644
--- a/applications/feedback_sentiment_analyzer/SPECIFICATION.md
+++ b/applications/feedback_sentiment_analyzer/SPECIFICATION.md
@@ -201,7 +201,7 @@ For example:
```json
{
- "bucket": "DOC-EXAMPLE-BUCKET",
+ "bucket": "amzn-s3-demo-bucket",
"region": "us-east-1",
"object": "obj/ect.png"
}
@@ -295,7 +295,7 @@ For example:
```json
{
- "bucket": "DOC-EXAMPLE-BUCKET",
+ "bucket": "amzn-s3-demo-bucket",
"translated_text": "THIS HOTEL WAS GREAT",
"region": "us-east-1",
"object": "comment.png"
diff --git a/aws-cli/bash-linux/s3/s3_getting_started.sh b/aws-cli/bash-linux/s3/s3_getting_started.sh
index c85cc61ae38..b83a565ac58 100755
--- a/aws-cli/bash-linux/s3/s3_getting_started.sh
+++ b/aws-cli/bash-linux/s3/s3_getting_started.sh
@@ -52,7 +52,7 @@ function s3_getting_started() {
echo_repeat "*" 88
local bucket_name
- bucket_name=$(generate_random_name "doc-example-bucket")
+ bucket_name=$(generate_random_name "amzn-s3-demo-bucket")
local region_code
region_code=$(aws configure get region)
diff --git a/cpp/example_code/iam/tests/mock_input/ListBuckets.xml b/cpp/example_code/iam/tests/mock_input/ListBuckets.xml
index fb80d336235..d608223d3e8 100644
--- a/cpp/example_code/iam/tests/mock_input/ListBuckets.xml
+++ b/cpp/example_code/iam/tests/mock_input/ListBuckets.xml
@@ -2,11 +2,11 @@
2019-12-11T23:32:47+00:00
- DOC-EXAMPLE-BUCKET
+ amzn-s3-demo-bucket
2019-11-10T23:32:13+00:00
- DOC-EXAMPLE-BUCKET2
+ amzn-s3-demo-bucket2
diff --git a/cpp/example_code/s3/create_bucket.cpp b/cpp/example_code/s3/create_bucket.cpp
index 23be332bb2e..cf51221b36a 100644
--- a/cpp/example_code/s3/create_bucket.cpp
+++ b/cpp/example_code/s3/create_bucket.cpp
@@ -77,9 +77,9 @@ int main() {
// Create a unique bucket name to increase the chance of success
// when trying to create the bucket.
- // Format: "doc-example-bucket-" + lowercase UUID.
+ // Format: "amzn-s3-demo-bucket-" + lowercase UUID.
Aws::String uuid = Aws::Utils::UUID::RandomUUID();
- Aws::String bucketName = "doc-example-bucket-" +
+ Aws::String bucketName = "amzn-s3-demo-bucket-" +
Aws::Utils::StringUtils::ToLower(uuid.c_str());
AwsDoc::S3::createBucket(bucketName, clientConfig);
diff --git a/cpp/example_code/s3/list_objects_with_aws_global_region.cpp b/cpp/example_code/s3/list_objects_with_aws_global_region.cpp
index dc5e87d8306..e57a5324205 100644
--- a/cpp/example_code/s3/list_objects_with_aws_global_region.cpp
+++ b/cpp/example_code/s3/list_objects_with_aws_global_region.cpp
@@ -45,7 +45,7 @@ static const int MAX_TIMEOUT_RETRIES = 20;
static Aws::String createOneBucket(const Aws::S3::S3Client &s3Client) {
// Create an S3 bucket within the us-west-2 AWS Region.
Aws::String uuid = Aws::Utils::UUID::RandomUUID();
- Aws::String bucketName = "doc-example-bucket-" +
+ Aws::String bucketName = "amzn-s3-demo-bucket-" +
Aws::Utils::StringUtils::ToLower(uuid.c_str());
Aws::S3::Model::CreateBucketRequest createBucketRequest;
@@ -203,4 +203,4 @@ int main() {
return 0;
}
-#endif // EXCLUDE_MAIN_FUNCTION
\ No newline at end of file
+#endif // EXCLUDE_MAIN_FUNCTION
diff --git a/cpp/example_code/s3/s3_getting_started_scenario.cpp b/cpp/example_code/s3/s3_getting_started_scenario.cpp
index 2e670c59b35..05b6ca9b03e 100644
--- a/cpp/example_code/s3/s3_getting_started_scenario.cpp
+++ b/cpp/example_code/s3/s3_getting_started_scenario.cpp
@@ -81,9 +81,9 @@ bool AwsDoc::S3::S3_GettingStartedScenario(const Aws::String &uploadFilePath,
Aws::S3::S3Client client(clientConfig);
// Create a unique bucket name which is only temporary and will be deleted.
- // Format: "doc-example-bucket-" + lowercase UUID.
+ // Format: "amzn-s3-demo-bucket-" + lowercase UUID.
Aws::String uuid = Aws::Utils::UUID::RandomUUID();
- Aws::String bucketName = "doc-example-bucket-" +
+ Aws::String bucketName = "amzn-s3-demo-bucket-" +
Aws::Utils::StringUtils::ToLower(uuid.c_str());
// 1. Create a bucket.
diff --git a/cpp/example_code/s3/tests/S3_GTests.cpp b/cpp/example_code/s3/tests/S3_GTests.cpp
index ada6dc9af0b..8eb71b03d5b 100644
--- a/cpp/example_code/s3/tests/S3_GTests.cpp
+++ b/cpp/example_code/s3/tests/S3_GTests.cpp
@@ -89,7 +89,7 @@ void AwsDocTest::S3_GTests::TearDownTestSuite() {
std::vector AwsDocTest::S3_GTests::GetCachedS3Buckets(size_t count) {
for (size_t index = s_cachedS3Buckets.size(); index < count; ++index) {
Aws::String uuid = Aws::Utils::UUID::RandomUUID();
- Aws::String bucketName = "doc-example-bucket-" +
+ Aws::String bucketName = "amzn-s3-demo-bucket-" +
Aws::Utils::StringUtils::ToLower(uuid.c_str());
if (CreateBucket(bucketName)) {
diff --git a/cpp/example_code/s3/tests/gtest_create_bucket.cpp b/cpp/example_code/s3/tests/gtest_create_bucket.cpp
index b0729d1ac37..c468f88cf20 100644
--- a/cpp/example_code/s3/tests/gtest_create_bucket.cpp
+++ b/cpp/example_code/s3/tests/gtest_create_bucket.cpp
@@ -19,7 +19,7 @@ namespace AwsDocTest {
// NOLINTNEXTLINE(readability-named-parameter)
TEST_F(S3_GTests, create_bucket_2_) {
Aws::String uuid = Aws::Utils::UUID::RandomUUID();
- Aws::String bucketName = "doc-example-bucket-" +
+ Aws::String bucketName = "amzn-s3-demo-bucket-" +
Aws::Utils::StringUtils::ToLower(uuid.c_str());
bool result = AwsDoc::S3::createBucket(bucketName, *s_clientConfig);
diff --git a/cpp/example_code/s3/tests/gtest_delete_bucket.cpp b/cpp/example_code/s3/tests/gtest_delete_bucket.cpp
index d848cbf0a79..5ca455fbb21 100644
--- a/cpp/example_code/s3/tests/gtest_delete_bucket.cpp
+++ b/cpp/example_code/s3/tests/gtest_delete_bucket.cpp
@@ -19,7 +19,7 @@ namespace AwsDocTest {
// NOLINTNEXTLINE(readability-named-parameter)
TEST_F(S3_GTests, delete_bucket_2_) {
Aws::String uuid = Aws::Utils::UUID::RandomUUID();
- Aws::String bucketName = "doc-example-bucket-" +
+ Aws::String bucketName = "amzn-s3-demo-bucket-" +
Aws::Utils::StringUtils::ToLower(uuid.c_str());
diff --git a/cpp/example_code/s3/tests/gtest_put_bucket_policy.cpp b/cpp/example_code/s3/tests/gtest_put_bucket_policy.cpp
index 445abb87b0c..dcd89dda199 100644
--- a/cpp/example_code/s3/tests/gtest_put_bucket_policy.cpp
+++ b/cpp/example_code/s3/tests/gtest_put_bucket_policy.cpp
@@ -47,12 +47,12 @@ namespace AwsDocTest {
"AWS": "arn:aws:iam::111111222222:user/UnitTester"
},
"Action": "s3:GetObject",
- "Resource": "arn:aws:s3:::doc-example-bucket/*"
+ "Resource": "arn:aws:s3:::amzn-s3-demo-bucket/*"
}
]
})";
- result = AwsDoc::S3::putBucketPolicy("doc-example-bucket", policyString, *s_clientConfig);
+ result = AwsDoc::S3::putBucketPolicy("amzn-s3-demo-bucket", policyString, *s_clientConfig);
ASSERT_TRUE(result);
}
} // namespace AwsDocTest
diff --git a/cpp/example_code/s3/tests/gtest_s3_demo_for_cloud9.cpp b/cpp/example_code/s3/tests/gtest_s3_demo_for_cloud9.cpp
index 07d07b516fc..a7946cd4894 100644
--- a/cpp/example_code/s3/tests/gtest_s3_demo_for_cloud9.cpp
+++ b/cpp/example_code/s3/tests/gtest_s3_demo_for_cloud9.cpp
@@ -11,7 +11,7 @@ namespace AwsDocTest {
// NOLINTNEXTLINE(readability-named-parameter)
TEST_F(S3_GTests, s3_demo_for_cloud9) {
Aws::String uuid = Aws::Utils::UUID::RandomUUID();
- Aws::String bucketName = "doc-example-bucket-" +
+ Aws::String bucketName = "amzn-s3-demo-bucket-" +
Aws::Utils::StringUtils::ToLower(uuid.c_str());
Aws::S3::S3Client s3Client(*s_clientConfig);
diff --git a/cpp/example_code/s3/tests/mock_input/GetBucketPolicy.json b/cpp/example_code/s3/tests/mock_input/GetBucketPolicy.json
index 1b0d7b59c88..1dc04ae6339 100644
--- a/cpp/example_code/s3/tests/mock_input/GetBucketPolicy.json
+++ b/cpp/example_code/s3/tests/mock_input/GetBucketPolicy.json
@@ -8,7 +8,7 @@
"AWS": "arn:aws:iam::111111222222:user/UnitTester"
},
"Action": "s3:GetObject",
- "Resource": "arn:aws:s3:::doc-example-bucket/*"
+ "Resource": "arn:aws:s3:::amzn-s3-demo-bucket/*"
}
]
-}
\ No newline at end of file
+}
diff --git a/dotnetv3/CloudWatchLogs/CreateExportTaskExample/CreateExportTask.cs b/dotnetv3/CloudWatchLogs/CreateExportTaskExample/CreateExportTask.cs
index f093c31e4f5..5fe8753c94f 100644
--- a/dotnetv3/CloudWatchLogs/CreateExportTaskExample/CreateExportTask.cs
+++ b/dotnetv3/CloudWatchLogs/CreateExportTaskExample/CreateExportTask.cs
@@ -25,7 +25,7 @@ public static async Task Main()
var client = new AmazonCloudWatchLogsClient();
string taskName = "export-task-example";
string logGroupName = "cloudwatchlogs-example-loggroup";
- string destination = "doc-example-bucket";
+ string destination = "amzn-s3-demo-bucket";
var fromTime = 1437584472382;
var toTime = 1437584472833;
diff --git a/dotnetv3/EC2/VirtualPrivateCloudExamples/CreateVPCTests/testsettings.json b/dotnetv3/EC2/VirtualPrivateCloudExamples/CreateVPCTests/testsettings.json
index cf113cc02ff..2972957e0a4 100644
--- a/dotnetv3/EC2/VirtualPrivateCloudExamples/CreateVPCTests/testsettings.json
+++ b/dotnetv3/EC2/VirtualPrivateCloudExamples/CreateVPCTests/testsettings.json
@@ -1,5 +1,5 @@
{
- "BucketName": "DOC-EXAMPLE-BUCKET1",
+ "BucketName": "amzn-s3-demo-bucket1",
"VpcId": "vpc-1a2b3c4d",
"SubnetId": "subnet-012345678912345606",
"SecurityGroupId": "sg-012345678912345606",
diff --git a/dotnetv3/EC2/VirtualPrivateCloudExamples/CreateVPCforS3Example/appsettings.json b/dotnetv3/EC2/VirtualPrivateCloudExamples/CreateVPCforS3Example/appsettings.json
index d18f43a367b..1216ee2ffce 100644
--- a/dotnetv3/EC2/VirtualPrivateCloudExamples/CreateVPCforS3Example/appsettings.json
+++ b/dotnetv3/EC2/VirtualPrivateCloudExamples/CreateVPCforS3Example/appsettings.json
@@ -1,5 +1,5 @@
{
- "BucketName": "DOC-EXAMPLE-BUCKET1",
+ "BucketName": "amzn-s3-demo-bucket1",
"VpcId": "vpc-1a2b3c4d",
"SubnetId": "subnet-012345678912345606",
"SecurityGroupId": "sg-012345678912345606",
diff --git a/dotnetv3/Lambda/Scenarios/LambdaBasics/settings.json b/dotnetv3/Lambda/Scenarios/LambdaBasics/settings.json
index 65bd41281f8..13377f3f8f7 100644
--- a/dotnetv3/Lambda/Scenarios/LambdaBasics/settings.json
+++ b/dotnetv3/Lambda/Scenarios/LambdaBasics/settings.json
@@ -2,9 +2,9 @@
"FunctionName": "LambdaExample",
"Handler": "LambdaIncrement::LambdaIncrement.Function::FunctionHandler",
"UpdatedHandler": "LambdaCalculator::LambdaCalculator.Function::FunctionHandler",
- "BucketName": "doc-example-bucket",
+ "BucketName": "amzn-s3-demo-bucket",
"IncrementKey": "LambdaIncrement.zip",
"CalculatorKey": "LambdaCalculator.zip",
"RoleName": "lambda-support",
"PolicyArn": "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
-}
\ No newline at end of file
+}
diff --git a/dotnetv3/Lambda/Tests/testsettings.json b/dotnetv3/Lambda/Tests/testsettings.json
index 4eed33cfb8f..2ee5e4fff12 100644
--- a/dotnetv3/Lambda/Tests/testsettings.json
+++ b/dotnetv3/Lambda/Tests/testsettings.json
@@ -2,7 +2,7 @@
"FunctionName": "LambdaExample",
"IncrementHandler": "LambdaIncrement::LambdaIncrement.Function::FunctionHandler",
"CalculatorHandler": "LambdaCalculator::LambdaCalculator.Function::FunctionHandler",
- "BucketName": "doc-example-bucket",
+ "BucketName": "amzn-s3-demo-bucket",
"IncrementKey": "LambdaIncrement.zip",
"CalculatorKey": "LambdaCalculator.zip",
"RoleName": "lambda-support",
diff --git a/dotnetv3/Rekognition/AddFacesExample/AddFaces.cs b/dotnetv3/Rekognition/AddFacesExample/AddFaces.cs
index 502fa0f793d..d1f4ddca40f 100644
--- a/dotnetv3/Rekognition/AddFacesExample/AddFaces.cs
+++ b/dotnetv3/Rekognition/AddFacesExample/AddFaces.cs
@@ -20,7 +20,7 @@ public class AddFaces
public static async Task Main()
{
string collectionId = "MyCollection2";
- string bucket = "doc-example-bucket";
+ string bucket = "amzn-s3-demo-bucket";
string photo = "input.jpg";
var rekognitionClient = new AmazonRekognitionClient();
diff --git a/dotnetv3/S3/AbortMPUExample/AbortMPU.cs b/dotnetv3/S3/AbortMPUExample/AbortMPU.cs
index 547ae708c5f..729f68c9982 100644
--- a/dotnetv3/S3/AbortMPUExample/AbortMPU.cs
+++ b/dotnetv3/S3/AbortMPUExample/AbortMPU.cs
@@ -18,7 +18,7 @@ public class AbortMPU
{
public static async Task Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
// If the AWS Region defined for your default user is different
// from the Region where your Amazon S3 bucket is located,
diff --git a/dotnetv3/S3/BucketACLExample/BucketACL.cs b/dotnetv3/S3/BucketACLExample/BucketACL.cs
index 971ddfc897d..705a2fffc8f 100644
--- a/dotnetv3/S3/BucketACLExample/BucketACL.cs
+++ b/dotnetv3/S3/BucketACLExample/BucketACL.cs
@@ -17,7 +17,7 @@ public class BucketACL
{
public static async Task Main()
{
- const string newBucketName = "doc-example-bucket";
+ const string newBucketName = "amzn-s3-demo-bucket";
IAmazonS3 client = new AmazonS3Client();
diff --git a/dotnetv3/S3/CopyObjectExample/CopyObject.cs b/dotnetv3/S3/CopyObjectExample/CopyObject.cs
index 7b1c7a1762c..f53627cb96e 100644
--- a/dotnetv3/S3/CopyObjectExample/CopyObject.cs
+++ b/dotnetv3/S3/CopyObjectExample/CopyObject.cs
@@ -23,8 +23,8 @@ public static async Task Main()
IAmazonS3 s3Client = new AmazonS3Client();
// Remember to change these values to refer to your Amazon S3 objects.
- string sourceBucketName = "doc-example-bucket1";
- string destinationBucketName = "doc-example-bucket2";
+ string sourceBucketName = "amzn-s3-demo-bucket1";
+ string destinationBucketName = "amzn-s3-demo-bucket2";
string sourceObjectKey = "testfile.txt";
string destinationObjectKey = "testfilecopy.txt";
diff --git a/dotnetv3/S3/EnableNotificationsExample/EnableNotifications.cs b/dotnetv3/S3/EnableNotificationsExample/EnableNotifications.cs
index 6f4bd632cf6..c8ea53c9636 100644
--- a/dotnetv3/S3/EnableNotificationsExample/EnableNotifications.cs
+++ b/dotnetv3/S3/EnableNotificationsExample/EnableNotifications.cs
@@ -18,7 +18,7 @@ public class EnableNotifications
{
public static async Task Main()
{
- const string bucketName = "doc-example-bucket1";
+ const string bucketName = "amzn-s3-demo-bucket1";
const string snsTopic = "arn:aws:sns:us-east-2:0123456789ab:bucket-notify";
const string sqsQueue = "arn:aws:sqs:us-east-2:0123456789ab:Example_Queue";
diff --git a/dotnetv3/S3/GenPresignedURLExample/GenPresignedUrl.cs b/dotnetv3/S3/GenPresignedURLExample/GenPresignedUrl.cs
index 13abf0b1561..2b524d1b24e 100644
--- a/dotnetv3/S3/GenPresignedURLExample/GenPresignedUrl.cs
+++ b/dotnetv3/S3/GenPresignedURLExample/GenPresignedUrl.cs
@@ -18,7 +18,7 @@ public class GenPresignedUrl
{
public static void Main()
{
- const string bucketName = "doc-example-bucket";
+ const string bucketName = "amzn-s3-demo-bucket";
const string objectKey = "sample.txt";
// Specify how long the presigned URL lasts, in hours
diff --git a/dotnetv3/S3/LifecycleExample/Lifecycle.cs b/dotnetv3/S3/LifecycleExample/Lifecycle.cs
index 7e402986c92..673e505aa12 100644
--- a/dotnetv3/S3/LifecycleExample/Lifecycle.cs
+++ b/dotnetv3/S3/LifecycleExample/Lifecycle.cs
@@ -24,7 +24,7 @@ public static async Task Main()
// should look like this:
// RegionEndpoint.USWest2
var client = new AmazonS3Client();
- const string BucketName = "doc-example-bucket";
+ const string BucketName = "amzn-s3-demo-bucket";
await AddUpdateDeleteLifecycleConfigAsync(client, BucketName);
}
diff --git a/dotnetv3/S3/ListObjectsPaginatorExample/ListObjectsPaginator.cs b/dotnetv3/S3/ListObjectsPaginatorExample/ListObjectsPaginator.cs
index afdeacb3e0c..801b5894ada 100644
--- a/dotnetv3/S3/ListObjectsPaginatorExample/ListObjectsPaginator.cs
+++ b/dotnetv3/S3/ListObjectsPaginatorExample/ListObjectsPaginator.cs
@@ -15,7 +15,7 @@ namespace ListObjectsPaginatorExample
///
public class ListObjectsPaginator
{
- private const string BucketName = "doc-example-bucket";
+ private const string BucketName = "amzn-s3-demo-bucket";
public static async Task Main()
{
diff --git a/dotnetv3/S3/MPUapiCopyObjExample/MPUapiCopyObj.cs b/dotnetv3/S3/MPUapiCopyObjExample/MPUapiCopyObj.cs
index a930d088109..540c5d7deec 100644
--- a/dotnetv3/S3/MPUapiCopyObjExample/MPUapiCopyObj.cs
+++ b/dotnetv3/S3/MPUapiCopyObjExample/MPUapiCopyObj.cs
@@ -16,8 +16,8 @@ namespace MPUapiCopyObjectExample
///
public class MPUapiCopyObj
{
- private const string SourceBucket = "doc-example-bucket1";
- private const string TargetBucket = "doc-example-bucket2";
+ private const string SourceBucket = "amzn-s3-demo-bucket1";
+ private const string TargetBucket = "amzn-s3-demo-bucket2";
private const string SourceObjectKey = "example.mov";
private const string TargetObjectKey = "copied_video_file.mov";
diff --git a/dotnetv3/S3/ManageACLsExample/ManageACLs.cs b/dotnetv3/S3/ManageACLsExample/ManageACLs.cs
index bf620e05f33..20512ebdfcc 100644
--- a/dotnetv3/S3/ManageACLsExample/ManageACLs.cs
+++ b/dotnetv3/S3/ManageACLsExample/ManageACLs.cs
@@ -19,8 +19,8 @@ public class ManageACLs
{
public static async Task Main()
{
- string bucketName = "doc-example-bucket1";
- string newBucketName = "doc-example-bucket2";
+ string bucketName = "amzn-s3-demo-bucket1";
+ string newBucketName = "amzn-s3-demo-bucket2";
string keyName = "sample-object.txt";
string emailAddress = "someone@example.com";
diff --git a/dotnetv3/S3/ObjectTagExample/ObjectTag.cs b/dotnetv3/S3/ObjectTagExample/ObjectTag.cs
index 862ea6dc228..037b7146423 100644
--- a/dotnetv3/S3/ObjectTagExample/ObjectTag.cs
+++ b/dotnetv3/S3/ObjectTagExample/ObjectTag.cs
@@ -19,7 +19,7 @@ public class ObjectTag
{
public static async Task Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
string keyName = "newobject.txt";
string filePath = @"*** file path ***";
diff --git a/dotnetv3/S3/RestoreArchivedObjectExample/RestoreArchivedObject.cs b/dotnetv3/S3/RestoreArchivedObjectExample/RestoreArchivedObject.cs
index 79b3c72de48..4ea3c145230 100644
--- a/dotnetv3/S3/RestoreArchivedObjectExample/RestoreArchivedObject.cs
+++ b/dotnetv3/S3/RestoreArchivedObjectExample/RestoreArchivedObject.cs
@@ -18,7 +18,7 @@ public class RestoreArchivedObject
{
public static void Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
string objectKey = "archived-object.txt";
// Specify your bucket region (an example region is shown).
diff --git a/dotnetv3/S3/SSECLowLevelMPUcopyObjectExample/SSECLowLevelMPUcopyObject.cs b/dotnetv3/S3/SSECLowLevelMPUcopyObjectExample/SSECLowLevelMPUcopyObject.cs
index ea062cfc08b..b8080deddfa 100644
--- a/dotnetv3/S3/SSECLowLevelMPUcopyObjectExample/SSECLowLevelMPUcopyObject.cs
+++ b/dotnetv3/S3/SSECLowLevelMPUcopyObjectExample/SSECLowLevelMPUcopyObject.cs
@@ -20,7 +20,7 @@ public class SSECLowLevelMPUcopyObject
{
public static async Task Main()
{
- string existingBucketName = "doc-example-bucket";
+ string existingBucketName = "amzn-s3-demo-bucket";
string sourceKeyName = "sample_file.txt";
string targetKeyName = "sample_file_copy.txt";
string filePath = $"sample\\{targetKeyName}";
diff --git a/dotnetv3/S3/SSEClientEncryptionExample/SSEClientEncryption.cs b/dotnetv3/S3/SSEClientEncryptionExample/SSEClientEncryption.cs
index cbf24036da1..f9e0d0ac843 100644
--- a/dotnetv3/S3/SSEClientEncryptionExample/SSEClientEncryption.cs
+++ b/dotnetv3/S3/SSEClientEncryptionExample/SSEClientEncryption.cs
@@ -19,7 +19,7 @@ public class SSEClientEncryption
{
public static async Task Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
string keyName = "exampleobject.txt";
string copyTargetKeyName = "examplecopy.txt";
diff --git a/dotnetv3/S3/ServerAccessLoggingExample/settings.json b/dotnetv3/S3/ServerAccessLoggingExample/settings.json
index 40544d0eb9e..6e4e81c14f5 100644
--- a/dotnetv3/S3/ServerAccessLoggingExample/settings.json
+++ b/dotnetv3/S3/ServerAccessLoggingExample/settings.json
@@ -1,6 +1,6 @@
{
- "BucketName": "DOC-EXAMPLE-SOURCE-BUCKET",
- "LogBucketName": "DOC-EXAMPLE-DESTINATION-BUCKET",
+ "BucketName": "amzn-s3-demo-source-bucket",
+ "LogBucketName": "amzn-s3-demo-destination-bucket",
"LogObjectKeyPrefix": "access-logs/",
"AccountId": "111122223333"
}
diff --git a/dotnetv3/S3/ServerSideEncryptionExample/ServerSideEncryption.cs b/dotnetv3/S3/ServerSideEncryptionExample/ServerSideEncryption.cs
index ce5246bab9c..f5eb90d6e0c 100644
--- a/dotnetv3/S3/ServerSideEncryptionExample/ServerSideEncryption.cs
+++ b/dotnetv3/S3/ServerSideEncryptionExample/ServerSideEncryption.cs
@@ -17,7 +17,7 @@ public class ServerSideEncryption
{
public static async Task Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
string keyName = "samplefile.txt";
// If the AWS Region defined for your default user is different
diff --git a/dotnetv3/S3/TrackMPUUsingHighLevelAPIExample/TrackMPUUsingHighLevelAPI.cs b/dotnetv3/S3/TrackMPUUsingHighLevelAPIExample/TrackMPUUsingHighLevelAPI.cs
index 17f7b50ddb7..e6c0f5cbb56 100644
--- a/dotnetv3/S3/TrackMPUUsingHighLevelAPIExample/TrackMPUUsingHighLevelAPI.cs
+++ b/dotnetv3/S3/TrackMPUUsingHighLevelAPIExample/TrackMPUUsingHighLevelAPI.cs
@@ -18,7 +18,7 @@ public class TrackMPUUsingHighLevelAPI
{
public static async Task Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
string keyName = "sample_pic.png";
string path = "filepath/directory/";
string filePath = $"{path}{keyName}";
diff --git a/dotnetv3/S3/TransferAccelerationExample/TransferAcceleration.cs b/dotnetv3/S3/TransferAccelerationExample/TransferAcceleration.cs
index 75ded2d42ad..78f32b4ebfd 100644
--- a/dotnetv3/S3/TransferAccelerationExample/TransferAcceleration.cs
+++ b/dotnetv3/S3/TransferAccelerationExample/TransferAcceleration.cs
@@ -25,7 +25,7 @@ public class TransferAcceleration
public static async Task Main()
{
var s3Client = new AmazonS3Client();
- const string bucketName = "doc-example-bucket";
+ const string bucketName = "amzn-s3-demo-bucket";
await EnableAccelerationAsync(s3Client, bucketName);
}
diff --git a/dotnetv3/S3/UploadUsingPresignedURLExample/UploadUsingPresignedURL.cs b/dotnetv3/S3/UploadUsingPresignedURLExample/UploadUsingPresignedURL.cs
index a9fc781bd16..f7401da6aa6 100644
--- a/dotnetv3/S3/UploadUsingPresignedURLExample/UploadUsingPresignedURL.cs
+++ b/dotnetv3/S3/UploadUsingPresignedURLExample/UploadUsingPresignedURL.cs
@@ -24,7 +24,7 @@ public class UploadUsingPresignedURL
public static async Task Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
string keyName = "samplefile.txt";
string filePath = $"source\\{keyName}";
diff --git a/dotnetv3/S3/WebsiteConfigExample/WebsiteConfig.cs b/dotnetv3/S3/WebsiteConfigExample/WebsiteConfig.cs
index abddc644e66..ead1e506fcd 100644
--- a/dotnetv3/S3/WebsiteConfigExample/WebsiteConfig.cs
+++ b/dotnetv3/S3/WebsiteConfigExample/WebsiteConfig.cs
@@ -23,7 +23,7 @@ public class WebsiteConfig
///
public static async Task Main()
{
- const string bucketName = "doc-example-bucket";
+ const string bucketName = "amzn-s3-demo-bucket";
const string indexDocumentSuffix = "index.html";
const string errorDocument = "error.html";
diff --git a/dotnetv3/S3/non-versioned-examples/DeleteMultipleObjectsExample/DeleteMultipleObjects.cs b/dotnetv3/S3/non-versioned-examples/DeleteMultipleObjectsExample/DeleteMultipleObjects.cs
index 10439db7b67..ea784e1019e 100644
--- a/dotnetv3/S3/non-versioned-examples/DeleteMultipleObjectsExample/DeleteMultipleObjects.cs
+++ b/dotnetv3/S3/non-versioned-examples/DeleteMultipleObjectsExample/DeleteMultipleObjects.cs
@@ -22,7 +22,7 @@ public class DeleteMultipleObjects
///
public static async Task Main()
{
- const string bucketName = "doc-example-bucket";
+ const string bucketName = "amzn-s3-demo-bucket";
// If the Amazon S3 bucket from which you wish to delete objects is not
// located in the same AWS Region as the default user, define the
diff --git a/dotnetv3/S3/non-versioned-examples/DeleteObjectExample/DeleteObject.cs b/dotnetv3/S3/non-versioned-examples/DeleteObjectExample/DeleteObject.cs
index 60b3d539d5f..5a8bab4f82a 100644
--- a/dotnetv3/S3/non-versioned-examples/DeleteObjectExample/DeleteObject.cs
+++ b/dotnetv3/S3/non-versioned-examples/DeleteObjectExample/DeleteObject.cs
@@ -22,7 +22,7 @@ public class DeleteObject
///
public static async Task Main()
{
- const string bucketName = "doc-example-bucket";
+ const string bucketName = "amzn-s3-demo-bucket";
const string keyName = "testfile.txt";
// If the Amazon S3 bucket is located in an AWS Region other than the
diff --git a/dotnetv3/S3/s3CORSExample/S3CORS.cs b/dotnetv3/S3/s3CORSExample/S3CORS.cs
index e412253c7d2..30f4080cd56 100644
--- a/dotnetv3/S3/s3CORSExample/S3CORS.cs
+++ b/dotnetv3/S3/s3CORSExample/S3CORS.cs
@@ -22,7 +22,7 @@ public class S3CORS
{
// Remember to change the bucket name to the name of an Amazon Simple
// Storage Service (Amazon S3) bucket that exists on your account.
- private const string BucketName = "doc-example-bucket";
+ private const string BucketName = "amzn-s3-demo-bucket";
public static async Task Main()
{
diff --git a/dotnetv3/S3/scenarios/TransferUtilityBasics/TransferUtilityBasics/settings.json b/dotnetv3/S3/scenarios/TransferUtilityBasics/TransferUtilityBasics/settings.json
index 8c7d419242b..ce1d04a2d7e 100644
--- a/dotnetv3/S3/scenarios/TransferUtilityBasics/TransferUtilityBasics/settings.json
+++ b/dotnetv3/S3/scenarios/TransferUtilityBasics/TransferUtilityBasics/settings.json
@@ -1,6 +1,6 @@
{
- "BucketName": "doc-example-bucket",
+ "BucketName": "amzn-s3-demo-bucket",
"FileToDownload": "Download.txt",
"FileToUpload": "Upload.txt",
"S3Path": "DownloadPath"
-}
\ No newline at end of file
+}
diff --git a/dotnetv3/S3/scenarios/TransferUtilityBasics/TransferUtilityBasicsTests/testsettings.json b/dotnetv3/S3/scenarios/TransferUtilityBasics/TransferUtilityBasicsTests/testsettings.json
index 9cb6bd78af9..f46297f6ee2 100644
--- a/dotnetv3/S3/scenarios/TransferUtilityBasics/TransferUtilityBasicsTests/testsettings.json
+++ b/dotnetv3/S3/scenarios/TransferUtilityBasics/TransferUtilityBasicsTests/testsettings.json
@@ -1,6 +1,6 @@
{
- "BucketName": "doc-example-bucket",
+ "BucketName": "amzn-s3-demo-bucket",
"FileToDownload": "DownloadTest.txt",
"FileToUpload": "UploadTest.txt",
"S3Path": "DownloadTest"
-}
\ No newline at end of file
+}
diff --git a/dotnetv3/S3/versioned-examples/DeleteMultipleObjectsExample/DeleteMultipleObjects.cs b/dotnetv3/S3/versioned-examples/DeleteMultipleObjectsExample/DeleteMultipleObjects.cs
index 54cb686c928..56541869397 100644
--- a/dotnetv3/S3/versioned-examples/DeleteMultipleObjectsExample/DeleteMultipleObjects.cs
+++ b/dotnetv3/S3/versioned-examples/DeleteMultipleObjectsExample/DeleteMultipleObjects.cs
@@ -18,7 +18,7 @@ public class DeleteMultipleObjects
{
public static async Task Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
// If the AWS Region for your Amazon S3 bucket is different from
// the AWS Region of the default user, define the AWS Region for
diff --git a/dotnetv3/S3/versioned-examples/DeleteObjectVersionExample/DeleteObjectVersion.cs b/dotnetv3/S3/versioned-examples/DeleteObjectVersionExample/DeleteObjectVersion.cs
index fb3555f2157..bc64481048f 100644
--- a/dotnetv3/S3/versioned-examples/DeleteObjectVersionExample/DeleteObjectVersion.cs
+++ b/dotnetv3/S3/versioned-examples/DeleteObjectVersionExample/DeleteObjectVersion.cs
@@ -18,7 +18,7 @@ public class DeleteObjectVersion
{
public static async Task Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
string keyName = "verstioned-object.txt";
// If the AWS Region of the default user is different from the AWS
diff --git a/dotnetv3/S3/versioned-examples/ListObjectVersionsExample/ListObjectVersions.cs b/dotnetv3/S3/versioned-examples/ListObjectVersionsExample/ListObjectVersions.cs
index d65e5935880..15533390533 100644
--- a/dotnetv3/S3/versioned-examples/ListObjectVersionsExample/ListObjectVersions.cs
+++ b/dotnetv3/S3/versioned-examples/ListObjectVersionsExample/ListObjectVersions.cs
@@ -17,7 +17,7 @@ public class ListObjectVersions
{
public static async Task Main()
{
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
// If the AWS Region where your bucket is defined is different from
// the AWS Region where the Amazon S3 bucket is defined, pass the constant
diff --git a/dotnetv3/SQS/AuthorizeS3ToSendMessageExample/AuthorizeS3ToSendMessageExample/AuthorizeS3ToSendMessage.cs b/dotnetv3/SQS/AuthorizeS3ToSendMessageExample/AuthorizeS3ToSendMessageExample/AuthorizeS3ToSendMessage.cs
index d9ea7173398..22d0932f97a 100644
--- a/dotnetv3/SQS/AuthorizeS3ToSendMessageExample/AuthorizeS3ToSendMessageExample/AuthorizeS3ToSendMessage.cs
+++ b/dotnetv3/SQS/AuthorizeS3ToSendMessageExample/AuthorizeS3ToSendMessageExample/AuthorizeS3ToSendMessage.cs
@@ -23,7 +23,7 @@ public class AuthorizeS3ToSendMessage
public static async Task Main()
{
string queueUrl = "https://sqs.us-east-2.amazonaws.com/0123456789ab/Example_Queue";
- string bucketName = "doc-example-bucket";
+ string bucketName = "amzn-s3-demo-bucket";
// Create an Amazon SQS client object using the
// default user. If the AWS Region you want to use
diff --git a/dotnetv3/Transcribe/Actions/ActionExamples.cs b/dotnetv3/Transcribe/Actions/ActionExamples.cs
index ce872b75f33..8fa2e29e356 100644
--- a/dotnetv3/Transcribe/Actions/ActionExamples.cs
+++ b/dotnetv3/Transcribe/Actions/ActionExamples.cs
@@ -32,9 +32,9 @@ public static ActionExamples CreateInstance()
// Set this value to the Amazon S3 location of a media file.
// A sample media file is provided in the media folder of this solution.
private static readonly string transcriptionMediaLocation =
- "https://DOC-EXAMPLE-BUCKET1.s3.amazonaws.com/Jabberwocky.mp3";
+ "https://amzn-s3-demo-bucket1.s3.amazonaws.com/Jabberwocky.mp3";
// Set this value to an Amazon S3 bucket name where the output can be stored.
- private static readonly string transcriptionMediaOutputLocation = "DOC-EXAMPLE-BUCKET2";
+ private static readonly string transcriptionMediaOutputLocation = "amzn-s3-demo-bucket2";
private static readonly string customVocabularyName = "Example-jabber-vocabulary";
static async Task Main(string[] args)
diff --git a/dotnetv3/Transcribe/Tests/testsettings.json b/dotnetv3/Transcribe/Tests/testsettings.json
index 609b356d6a3..479b343034b 100644
--- a/dotnetv3/Transcribe/Tests/testsettings.json
+++ b/dotnetv3/Transcribe/Tests/testsettings.json
@@ -1,7 +1,7 @@
{
- "transcriptionMediaLocation": "DOC-EXAMPLE-BUCKET",
+ "transcriptionMediaLocation": "amzn-s3-demo-bucket",
"transcriptionJobName": "testTranscriptionJobName",
- "outputLocation": "DOC-EXAMPLE-BUCKET2",
+ "outputLocation": "amzn-s3-demo-bucket2",
"medicalTranscriptionJobName": "testMedicalTranscriptionJobName",
"customVocabularyName": "example_vocabulary_name"
}
diff --git a/dotnetv3/Translate/BatchTranslateExample/BatchTranslate.cs b/dotnetv3/Translate/BatchTranslateExample/BatchTranslate.cs
index 01c5d81a37d..99beb675ddf 100644
--- a/dotnetv3/Translate/BatchTranslateExample/BatchTranslate.cs
+++ b/dotnetv3/Translate/BatchTranslateExample/BatchTranslate.cs
@@ -23,8 +23,8 @@ public static async Task Main()
// Set this variable to an S3 bucket location with a folder."
// Input files must be in a folder and not at the bucket root."
- var s3InputUri = "s3://DOC-EXAMPLE-BUCKET1/FOLDER/";
- var s3OutputUri = "s3://DOC-EXAMPLE-BUCKET2/";
+ var s3InputUri = "s3://amzn-s3-demo-bucket1/FOLDER/";
+ var s3OutputUri = "s3://amzn-s3-demo-bucket2/";
// This role must have permissions to read the source bucket and to read and
// write to the destination bucket where the translated text will be stored.
diff --git a/dotnetv3/Translate/TranslateTextExample/TranslateText.cs b/dotnetv3/Translate/TranslateTextExample/TranslateText.cs
index d543b61e018..d5074b5ce17 100644
--- a/dotnetv3/Translate/TranslateTextExample/TranslateText.cs
+++ b/dotnetv3/Translate/TranslateTextExample/TranslateText.cs
@@ -36,7 +36,7 @@ public static async Task Main()
// The Amazon Simple Storage Service (Amazon S3) bucket where the
// source text file is stored.
- string srcBucket = "DOC-EXAMPLE-BUCKET";
+ string srcBucket = "amzn-s3-demo-bucket";
string srcTextFile = "source.txt";
var srcText = await GetSourceTextAsync(srcBucket, srcTextFile);
diff --git a/dotnetv3/cross-service/S3ObjectLambdaFunction/README.md b/dotnetv3/cross-service/S3ObjectLambdaFunction/README.md
index 56e2a5956d8..be2f40d1b98 100644
--- a/dotnetv3/cross-service/S3ObjectLambdaFunction/README.md
+++ b/dotnetv3/cross-service/S3ObjectLambdaFunction/README.md
@@ -24,7 +24,7 @@ Refer to [Create an S3 bucket](https://docs.aws.amazon.com/AmazonS3/latest/userg
Use CLI
```cmd
-aws s3api create-bucket --bucket DOC-EXAMPLE-BUCKET --region us-east-1
+aws s3api create-bucket --bucket amzn-s3-demo-bucket --region us-east-1
```
## Upload a file to the S3 bucket
@@ -46,7 +46,7 @@ or
Use CLI
```cmd
-aws s3 cp tutorial.txt s3://DOC-EXAMPLE-BUCKET
+aws s3 cp tutorial.txt s3://amzn-s3-demo-bucket
```
## Create an S3 access point
@@ -58,7 +58,7 @@ or
Use CLI
```cmd
-aws s3control create-access-point --bucket DOC-EXAMPLE-BUCKET --name DOC-EXAMPLE-BUCKET-ACCESSPOINT --account-id 111122223333
+aws s3control create-access-point --bucket amzn-s3-demo-bucket --name amzn-s3-demo-bucket-accesspoint --account-id 111122223333
```
diff --git a/javascriptv3/example_code/s3/actions/get-object-legal-hold.js b/javascriptv3/example_code/s3/actions/get-object-legal-hold.js
index 15fcccd4788..ac3f077a5a0 100644
--- a/javascriptv3/example_code/s3/actions/get-object-legal-hold.js
+++ b/javascriptv3/example_code/s3/actions/get-object-legal-hold.js
@@ -29,5 +29,5 @@ export const main = async (client, bucketName, objectKey) => {
// Invoke main function if this file was run directly.
if (process.argv[1] === fileURLToPath(import.meta.url)) {
- main(new S3Client(), "DOC-EXAMPLE-BUCKET", "OBJECT_KEY");
+ main(new S3Client(), "amzn-s3-demo-bucket", "OBJECT_KEY");
}
diff --git a/javav2/example_code/iam/src/main/java/com/example/iam/IamPolicyBuilderExamples.java b/javav2/example_code/iam/src/main/java/com/example/iam/IamPolicyBuilderExamples.java
index eeb1d59fed8..dca35ac34aa 100644
--- a/javav2/example_code/iam/src/main/java/com/example/iam/IamPolicyBuilderExamples.java
+++ b/javav2/example_code/iam/src/main/java/com/example/iam/IamPolicyBuilderExamples.java
@@ -233,7 +233,7 @@ public String allowCrossAccountAccessExample() {
.effect(IamEffect.ALLOW)
.addPrincipal(IamPrincipalType.AWS, "111122223333")
.addAction("s3:PutObject")
- .addResource("arn:aws:s3:::DOC-EXAMPLE-BUCKET/*")
+ .addResource("arn:aws:s3:::amzn-s3-demo-bucket/*")
.addCondition(b1 -> b1
.operator(IamConditionOperator.STRING_EQUALS)
.key("s3:x-amz-acl")
diff --git a/php/example_code/s3/GettingStartedWithS3.php b/php/example_code/s3/GettingStartedWithS3.php
index 0caf97b621e..51e37e3a510 100644
--- a/php/example_code/s3/GettingStartedWithS3.php
+++ b/php/example_code/s3/GettingStartedWithS3.php
@@ -55,7 +55,7 @@ public function runExample()
*/
// snippet-start:[php.example_code.s3.basics.bucketName]
- $this->bucketName = "doc-example-bucket-" . uniqid();
+ $this->bucketName = "amzn-s3-demo-bucket-" . uniqid();
// snippet-end:[php.example_code.s3.basics.bucketName]
// snippet-start:[php.example_code.s3.basics.createBucket]
diff --git a/php/example_code/s3/PresignedPost.php b/php/example_code/s3/PresignedPost.php
index 5988f35946e..4c185a6509f 100644
--- a/php/example_code/s3/PresignedPost.php
+++ b/php/example_code/s3/PresignedPost.php
@@ -16,7 +16,7 @@
'profile' => 'default',
'region' => 'us-east-1',
]);
-$bucket = 'doc-example-bucket10';
+$bucket = 'amzn-s3-demo-bucket10';
$starts_with = 'user/eric/';
$client->listBuckets();
diff --git a/python/cross_service/textract_comprehend_notebook/TextractAndComprehendNotebook.ipynb b/python/cross_service/textract_comprehend_notebook/TextractAndComprehendNotebook.ipynb
index eb312e2f89a..8fa64419116 100644
--- a/python/cross_service/textract_comprehend_notebook/TextractAndComprehendNotebook.ipynb
+++ b/python/cross_service/textract_comprehend_notebook/TextractAndComprehendNotebook.ipynb
@@ -80,7 +80,7 @@
"import pandas as pd\n",
"import os\n",
"\n",
- "bucket = \"DOC-EXAMPLE-BUCKET\"\n",
+ "bucket = \"amzn-s3-demo-bucket\"\n",
"document = \"Name of your document\"\n",
"region_name = \"Name of your region\""
]
diff --git a/python/cross_service/textract_comprehend_notebook/test-texttract-comprehend.py b/python/cross_service/textract_comprehend_notebook/test-texttract-comprehend.py
index 1cfe8c017ae..fa19fb80247 100644
--- a/python/cross_service/textract_comprehend_notebook/test-texttract-comprehend.py
+++ b/python/cross_service/textract_comprehend_notebook/test-texttract-comprehend.py
@@ -20,7 +20,7 @@ def test_text_detection(tb):
# variables for images
# replace the value of bucket with the name of a bucket and the value of document
# with the name of a document in the bucket
- bucket = "DOC-EXAMPLE-BUCKET"
+ bucket = "amzn-s3-demo-bucket"
document = "Your document name here"
res = detect_func(bucket, document, aws_access_code, aws_secret_code, region)
# Check if list returned
diff --git a/python/example_code/comprehend/comprehend_demo_resources.py b/python/example_code/comprehend/comprehend_demo_resources.py
index 3d31091db26..2bcbaac6be8 100644
--- a/python/example_code/comprehend/comprehend_demo_resources.py
+++ b/python/example_code/comprehend/comprehend_demo_resources.py
@@ -43,7 +43,7 @@ def setup(self, demo_name):
"""
try:
self.bucket = self.s3_resource.create_bucket(
- Bucket=f"doc-example-bucket-{uuid.uuid4()}",
+ Bucket=f"amzn-s3-demo-bucket-{uuid.uuid4()}",
CreateBucketConfiguration={
"LocationConstraint": self.s3_resource.meta.client.meta.region_name
},
diff --git a/python/example_code/comprehend/test/test_comprehend_demo_resources.py b/python/example_code/comprehend/test/test_comprehend_demo_resources.py
index a62cdc3961c..81971a4ecd6 100644
--- a/python/example_code/comprehend/test/test_comprehend_demo_resources.py
+++ b/python/example_code/comprehend/test/test_comprehend_demo_resources.py
@@ -28,7 +28,7 @@ def test_setup(make_stubber, stub_runner, monkeypatch, error_code, stop_on_metho
iam_stubber = make_stubber(iam_resource.meta.client)
demo_resources = ComprehendDemoResources(s3_resource, iam_resource)
demo_name = "test-name"
- bucket_name = "doc-example-bucket-test-uuid"
+ bucket_name = "amzn-s3-demo-bucket-test-uuid"
role_name = f"{demo_name}-role"
policy_name = f"{demo_name}-policy"
policy_arn = f"arn:aws:iam:REGION:123456789012:policy/{policy_name}"
@@ -118,7 +118,7 @@ def test_cleanup(make_stubber, monkeypatch, error_code):
iam_resource = boto3.resource("iam")
iam_stubber = make_stubber(iam_resource.meta.client)
demo_resources = ComprehendDemoResources(s3_resource, iam_resource)
- bucket_name = "doc-example-bucket-test-uuid"
+ bucket_name = "amzn-s3-demo-bucket-test-uuid"
role_name = "comprehend-classifier-demo-role"
policy_name = "comprehend-classifier-demo-policy"
policy_arn = "arn:aws:iam:REGION:123456789012:policy/test-policy"
diff --git a/python/example_code/lookoutvision/README.md b/python/example_code/lookoutvision/README.md
index ca3a12f583f..2f71ad19978 100644
--- a/python/example_code/lookoutvision/README.md
+++ b/python/example_code/lookoutvision/README.md
@@ -94,7 +94,7 @@ Before running these demonstrations do the following:
The folder structures for the training and test images must be as follows:
```
-s3://doc-example-bucket//
+s3://amzn-s3-demo-bucket//
normal/
anomaly/
```
@@ -234,11 +234,11 @@ Start the example by running the following at a command prompt:
- `project` - A name for your project.
- `bucket` - The name of the Amazon S3 bucket in which to store your manifest files and
training output. The bucket must be in your AWS account and in the same AWS Region as
- the Amazon S3 path supplied for `train` and `test`. For example, `doc-example-bucket`.
+ the Amazon S3 path supplied for `train` and `test`. For example, `amzn-s3-demo-bucket`.
- `train` - The Amazon S3 path where your training images are stored. For example,
- `s3://doc-example-bucket/circuitboard/train/`.
+ `s3://amzn-s3-demo-bucket/circuitboard/train/`.
- `test` - (Optional) the Amazon S3 path where your test images are stored. For example,
- `s3://doc-example-bucket/circuitboard/test/`. If you don't supply a value,
+ `s3://amzn-s3-demo-bucket/circuitboard/test/`. If you don't supply a value,
Lookout for Vision splits the training dataset to create a test dataset.
After training completes, use the performance metrics to decide if the model's
diff --git a/python/example_code/lookoutvision/datasets.py b/python/example_code/lookoutvision/datasets.py
index cb793d3e0b1..689e8483d50 100644
--- a/python/example_code/lookoutvision/datasets.py
+++ b/python/example_code/lookoutvision/datasets.py
@@ -104,7 +104,7 @@ def create_manifest_file_s3(s3_resource, image_s3_path, manifest_s3_path):
:param image_s3_path: The Amazon S3 path to the images referenced by the
manifest file. The images must be in an Amazon S3 bucket
with the following folder structure.
- s3://doc-example-bucket//
+ s3://amzn-s3-demo-bucket//
normal/
anomaly/
Place normal images in the normal folder and anomalous
diff --git a/python/example_code/lookoutvision/test/test_find_running_models.py b/python/example_code/lookoutvision/test/test_find_running_models.py
index 96c9cc26f61..9c6e9db91b2 100644
--- a/python/example_code/lookoutvision/test/test_find_running_models.py
+++ b/python/example_code/lookoutvision/test/test_find_running_models.py
@@ -38,7 +38,7 @@ def test_find_models_in_project(make_stubber, stub_runner, error_code, stop_on_m
recall = 0.3
precision = 0.5
f1 = 0.7
- out_buck = "doc-example-bucket"
+ out_buck = "amzn-s3-demo-bucket"
out_folder = "test-folder"
with stub_runner(error_code, stop_on_method) as runner:
@@ -100,7 +100,7 @@ def test_find_running_models(
recall = 0.3
precision = 0.5
f1 = 0.7
- out_buck = "doc-example-bucket"
+ out_buck = "amzn-s3-demo-bucket"
out_folder = "test-folder"
project_arn = "test-arn"
region = "us-east-1"
diff --git a/python/example_code/lookoutvision/test/test_inference.py b/python/example_code/lookoutvision/test/test_inference.py
index 5abd6bdd6ef..1451e3768e5 100644
--- a/python/example_code/lookoutvision/test/test_inference.py
+++ b/python/example_code/lookoutvision/test/test_inference.py
@@ -57,7 +57,7 @@ def test_detect_anomalies(make_stubber, monkeypatch, error_code):
def test_download_from_s3(make_stubber, monkeypatch):
s3_resource = boto3.resource("s3")
- photo = "s3://doc-example-bucket/test-photo.jpeg"
+ photo = "s3://amzn-s3-demo-bucket/test-photo.jpeg"
file = "test-photo.jpeg"
monkeypatch.setattr(
@@ -72,7 +72,7 @@ def test_download_from_s3(make_stubber, monkeypatch):
@pytest.mark.parametrize("error_code", [None, "TestException"])
def test_reject_on_classification(make_stubber, error_code):
- photo = "s3://doc-example-bucket/test-photo.jpeg"
+ photo = "s3://amzn-s3-demo-bucket/test-photo.jpeg"
prediction = {"IsAnomalous": True, "Confidence": 0.9}
confidence_limit = 0.5
@@ -88,7 +88,7 @@ def test_reject_on_classification(make_stubber, error_code):
@pytest.mark.parametrize("error_code", [None, "TestException"])
def test_reject_on_anomaly_types(make_stubber, error_code):
- photo = "s3://doc-example-bucket/test-photo.jpeg"
+ photo = "s3://amzn-s3-demo-bucket/test-photo.jpeg"
prediction = {
"IsAnomalous": True,
"Confidence": 0.9,
@@ -117,7 +117,7 @@ def test_reject_on_anomaly_types(make_stubber, error_code):
@pytest.mark.parametrize("error_code", [None, "TestException"])
def test_reject_on_coverage(make_stubber, error_code):
- photo = "s3://doc-example-bucket/test-photo.jpeg"
+ photo = "s3://amzn-s3-demo-bucket/test-photo.jpeg"
prediction = {
"IsAnomalous": True,
"Confidence": 0.9,
diff --git a/python/example_code/lookoutvision/test/test_models.py b/python/example_code/lookoutvision/test/test_models.py
index f9a1e939d72..6d43ca72015 100644
--- a/python/example_code/lookoutvision/test/test_models.py
+++ b/python/example_code/lookoutvision/test/test_models.py
@@ -20,7 +20,7 @@ def test_create_model(make_stubber, error_code):
project_name = "test-project_name"
model_version = "test-model"
model_arn = "test-arn"
- out_buck = "doc-example-bucket"
+ out_buck = "amzn-s3-demo-bucket"
out_folder = "test-results"
training_results = f"s3://{out_buck}/{out_folder}"
status = "TRAINED"
@@ -59,7 +59,7 @@ def test_describe_model(make_stubber, error_code):
recall = 0.3
precision = 0.5
f1 = 0.7
- out_buck = "doc-example-bucket"
+ out_buck = "amzn-s3-demo-bucket"
out_folder = "test-folder"
lookoutvision_stubber.stub_describe_model(
diff --git a/python/example_code/rekognition/rekognition_video_detection.py b/python/example_code/rekognition/rekognition_video_detection.py
index 21313aebeac..3ce173c7bf0 100644
--- a/python/example_code/rekognition/rekognition_video_detection.py
+++ b/python/example_code/rekognition/rekognition_video_detection.py
@@ -360,7 +360,7 @@ def usage_demo():
print("Creating Amazon S3 bucket and uploading video.")
s3_resource = boto3.resource("s3")
bucket = s3_resource.create_bucket(
- Bucket=f"doc-example-bucket-rekognition-{time.time_ns()}",
+ Bucket=f"amzn-s3-demo-bucket-rekognition-{time.time_ns()}",
CreateBucketConfiguration={
"LocationConstraint": s3_resource.meta.client.meta.region_name
},
diff --git a/python/example_code/rekognition/test/test_rekognition_video_detection.py b/python/example_code/rekognition/test/test_rekognition_video_detection.py
index ff557accdb0..ffaf4dd9458 100644
--- a/python/example_code/rekognition/test/test_rekognition_video_detection.py
+++ b/python/example_code/rekognition/test/test_rekognition_video_detection.py
@@ -29,7 +29,7 @@
def mock_video(monkeypatch, poll_status, rekognition_client):
video_name = "test-video"
video = RekognitionVideo(
- {"S3Object": {"Bucket": "doc-example-bucket", "Name": video_name}},
+ {"S3Object": {"Bucket": "amzn-s3-demo-bucket", "Name": video_name}},
video_name,
rekognition_client,
)
@@ -85,7 +85,7 @@ def test_create_notification_channel(
runner.add(iam_stubber.stub_attach_role_policy, resource_name, policy_arn)
video = RekognitionVideo(
- {"S3Object": {"Bucket": "doc-example-bucket", "Name": "doc-example-key"}},
+ {"S3Object": {"Bucket": "amzn-s3-demo-bucket", "Name": "doc-example-key"}},
"Test Video",
rekognition_client,
)
diff --git a/python/example_code/s3/s3_basics/bucket_wrapper.py b/python/example_code/s3/s3_basics/bucket_wrapper.py
index b7c35d88b0a..69bed101cad 100644
--- a/python/example_code/s3/s3_basics/bucket_wrapper.py
+++ b/python/example_code/s3/s3_basics/bucket_wrapper.py
@@ -384,7 +384,7 @@ def usage_demo():
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
s3_resource = boto3.resource("s3")
- prefix = "doc-example-bucket-"
+ prefix = "amzn-s3-demo-bucket-"
created_buckets = [
BucketWrapper(s3_resource.Bucket(prefix + str(uuid.uuid1()))) for _ in range(3)
]
diff --git a/python/example_code/s3/s3_basics/object_wrapper.py b/python/example_code/s3/s3_basics/object_wrapper.py
index 2d01acaa602..5fcdd7c5ced 100644
--- a/python/example_code/s3/s3_basics/object_wrapper.py
+++ b/python/example_code/s3/s3_basics/object_wrapper.py
@@ -294,7 +294,7 @@ def usage_demo():
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
s3_resource = boto3.resource("s3")
- bucket = s3_resource.Bucket(f"doc-example-bucket-{uuid.uuid4()}")
+ bucket = s3_resource.Bucket(f"amzn-s3-demo-bucket-{uuid.uuid4()}")
try:
bucket.create(
CreateBucketConfiguration={
diff --git a/python/example_code/s3/s3_basics/scenario_getting_started.py b/python/example_code/s3/s3_basics/scenario_getting_started.py
index 5ef606ab864..0f974b3b81b 100644
--- a/python/example_code/s3/s3_basics/scenario_getting_started.py
+++ b/python/example_code/s3/s3_basics/scenario_getting_started.py
@@ -28,7 +28,7 @@ def do_scenario(s3_resource):
print("Welcome to the Amazon S3 getting started demo!")
print("-" * 88)
- bucket_name = f"doc-example-bucket-{uuid.uuid4()}"
+ bucket_name = f"amzn-s3-demo-bucket-{uuid.uuid4()}"
bucket = s3_resource.Bucket(bucket_name)
try:
bucket.create(
diff --git a/python/example_code/ses/ses_receipt_handler.py b/python/example_code/ses/ses_receipt_handler.py
index 83b9644c5f9..73551bfcdfe 100644
--- a/python/example_code/ses/ses_receipt_handler.py
+++ b/python/example_code/ses/ses_receipt_handler.py
@@ -282,7 +282,7 @@ def usage_demo():
rule_set_name = "doc-example-rule-set"
rule_name = "copy-mail-to-bucket"
email = "example@example.org"
- bucket_name = f"doc-example-bucket-{time.time_ns()}"
+ bucket_name = f"amzn-s3-demo-bucket-{time.time_ns()}"
prefix = "example-emails/"
current_ip_address = (
diff --git a/python/example_code/ses/test/test_ses_receipt_handler.py b/python/example_code/ses/test/test_ses_receipt_handler.py
index aa482f3ed9d..e73615b1b9e 100644
--- a/python/example_code/ses/test/test_ses_receipt_handler.py
+++ b/python/example_code/ses/test/test_ses_receipt_handler.py
@@ -107,7 +107,7 @@ def test_create_bucket_for_copy(make_stubber, stub_runner, error_code, stop_on_m
s3_resource = boto3.resource("s3")
s3_stubber = make_stubber(s3_resource.meta.client)
ses_receipt = SesReceiptHandler(None, s3_resource)
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
with stub_runner(error_code, stop_on_method) as runner:
runner.add(
@@ -138,7 +138,7 @@ def test_create_s3_copy_rule(make_stubber, error_code):
rule_set_name = "test-rule-set"
rule_name = "test-rule"
recipients = ["me", "myself", "I"]
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
prefix = "mymails/"
actions = [{"S3Action": {"BucketName": bucket_name, "ObjectKeyPrefix": prefix}}]
@@ -166,7 +166,7 @@ def test_describe_receipt_rule_set(make_stubber, error_code):
rule_set_name = "test-rule-set"
rule_name = "test-rule"
recipients = ["me", "myself", "I"]
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
prefix = "mymails/"
actions = [{"S3Action": {"BucketName": bucket_name, "ObjectKeyPrefix": prefix}}]
diff --git a/ruby/example_code/cloudwatch/alarm_actions.rb b/ruby/example_code/cloudwatch/alarm_actions.rb
index ab0cda59e54..0a684292952 100644
--- a/ruby/example_code/cloudwatch/alarm_actions.rb
+++ b/ruby/example_code/cloudwatch/alarm_actions.rb
@@ -42,7 +42,7 @@
# [
# {
# name: 'BucketName',
-# value: 'doc-example-bucket'
+# value: 'amzn-s3-demo-bucket'
# },
# {
# name: 'StorageType',
@@ -127,7 +127,7 @@ def run_me
dimensions = [
{
name: "BucketName",
- value: "doc-example-bucket"
+ value: "amzn-s3-demo-bucket"
},
{
name: "StorageType",
diff --git a/ruby/example_code/cloudwatch/alarm_basics.rb b/ruby/example_code/cloudwatch/alarm_basics.rb
index 5b233f17f42..a4e57d055d7 100644
--- a/ruby/example_code/cloudwatch/alarm_basics.rb
+++ b/ruby/example_code/cloudwatch/alarm_basics.rb
@@ -62,7 +62,7 @@ def list_alarms(cloudwatch_client)
# [
# {
# name: 'BucketName',
-# value: 'doc-example-bucket'
+# value: 'amzn-s3-demo-bucket'
# },
# {
# name: 'StorageType',
@@ -147,7 +147,7 @@ def run_me
dimensions = [
{
name: "BucketName",
- value: "doc-example-bucket"
+ value: "amzn-s3-demo-bucket"
},
{
name: "StorageType",
diff --git a/ruby/example_code/cloudwatch/create_alarm.rb b/ruby/example_code/cloudwatch/create_alarm.rb
index 6d366170c86..0712d7407aa 100644
--- a/ruby/example_code/cloudwatch/create_alarm.rb
+++ b/ruby/example_code/cloudwatch/create_alarm.rb
@@ -39,7 +39,7 @@
# [
# {
# name: 'BucketName',
-# value: 'doc-example-bucket'
+# value: 'amzn-s3-demo-bucket'
# },
# {
# name: 'StorageType',
@@ -100,7 +100,7 @@ def run_me
dimensions = [
{
name: "BucketName",
- value: "doc-example-bucket"
+ value: "amzn-s3-demo-bucket"
},
{
name: "StorageType",
diff --git a/ruby/example_code/cloudwatch/spec/alarm_actions_spec.rb b/ruby/example_code/cloudwatch/spec/alarm_actions_spec.rb
index 74f17e04e86..956b0d5531c 100644
--- a/ruby/example_code/cloudwatch/spec/alarm_actions_spec.rb
+++ b/ruby/example_code/cloudwatch/spec/alarm_actions_spec.rb
@@ -15,7 +15,7 @@
[
{
name: "BucketName",
- value: "doc-example-bucket"
+ value: "amzn-s3-demo-bucket"
},
{
name: "StorageType",
diff --git a/ruby/example_code/cloudwatch/spec/alarm_basics_spec.rb b/ruby/example_code/cloudwatch/spec/alarm_basics_spec.rb
index ed5d7b0b9f2..d2bc268d413 100644
--- a/ruby/example_code/cloudwatch/spec/alarm_basics_spec.rb
+++ b/ruby/example_code/cloudwatch/spec/alarm_basics_spec.rb
@@ -35,7 +35,7 @@
[
{
name: "BucketName",
- value: "doc-example-bucket"
+ value: "amzn-s3-demo-bucket"
},
{
name: "StorageType",
diff --git a/ruby/example_code/cloudwatch/spec/create_alarm_spec.rb b/ruby/example_code/cloudwatch/spec/create_alarm_spec.rb
index 2da5b063e1b..39cfd6ffcb4 100644
--- a/ruby/example_code/cloudwatch/spec/create_alarm_spec.rb
+++ b/ruby/example_code/cloudwatch/spec/create_alarm_spec.rb
@@ -15,7 +15,7 @@
[
{
name: "BucketName",
- value: "doc-example-bucket"
+ value: "amzn-s3-demo-bucket"
},
{
name: "StorageType",
diff --git a/ruby/example_code/cloudwatch/spec/show_alarms_spec.rb b/ruby/example_code/cloudwatch/spec/show_alarms_spec.rb
index d2874547d93..910fdbeb80f 100644
--- a/ruby/example_code/cloudwatch/spec/show_alarms_spec.rb
+++ b/ruby/example_code/cloudwatch/spec/show_alarms_spec.rb
@@ -34,7 +34,7 @@
dimensions: [
{
name: "BucketName",
- value: "doc-example-bucket"
+ value: "amzn-s3-demo-bucket"
},
{
name: "StorageType",
diff --git a/ruby/example_code/s3/auth_federation_token_request_test.rb b/ruby/example_code/s3/auth_federation_token_request_test.rb
index ccd01b47bce..fa2a2feed87 100644
--- a/ruby/example_code/s3/auth_federation_token_request_test.rb
+++ b/ruby/example_code/s3/auth_federation_token_request_test.rb
@@ -56,7 +56,7 @@ def get_user(iam, user_name)
# 'Sid' => 'Stmt1',
# 'Effect' => 'Allow',
# 'Action' => 's3:ListBucket',
-# 'Resource' => 'arn:aws:s3:::doc-example-bucket'
+# 'Resource' => 'arn:aws:s3:::amzn-s3-demo-bucket'
# ]
# }
# )
@@ -80,7 +80,7 @@ def get_temporary_credentials(sts, duration_seconds, user_name, policy)
# @return [Boolean] true if the objects were listed; otherwise, false.
# @example
# s3_client = Aws::S3::Client.new(region: 'us-west-2')
-# exit 1 unless list_objects_in_bucket?(s3_client, 'doc-example-bucket')
+# exit 1 unless list_objects_in_bucket?(s3_client, 'amzn-s3-demo-bucket')
def list_objects_in_bucket?(s3_client, bucket_name)
puts "Accessing the contents of the bucket named '#{bucket_name}'..."
response = s3_client.list_objects_v2(
@@ -106,7 +106,7 @@ def list_objects_in_bucket?(s3_client, bucket_name)
def run_me
region = "us-west-2"
user_name = "my-user"
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
iam = Aws::IAM::Client.new(region: region)
user = get_user(iam, user_name)
diff --git a/ruby/example_code/s3/auth_request_object_keys.rb b/ruby/example_code/s3/auth_request_object_keys.rb
index ebb95c9c21c..4e9e2e880c0 100644
--- a/ruby/example_code/s3/auth_request_object_keys.rb
+++ b/ruby/example_code/s3/auth_request_object_keys.rb
@@ -17,7 +17,7 @@
# @return [Boolean] true if all operations succeed; otherwise, false.
# @example
# s3_client = Aws::S3::Client.new(region: 'us-west-2')
-# exit 1 unless list_bucket_objects?(s3_client, 'doc-example-bucket')
+# exit 1 unless list_bucket_objects?(s3_client, 'amzn-s3-demo-bucket')
def list_bucket_objects?(s3_client, bucket_name)
puts "Accessing the bucket named '#{bucket_name}'..."
objects = s3_client.list_objects_v2(
diff --git a/ruby/example_code/s3/auth_session_token_request_test.rb b/ruby/example_code/s3/auth_session_token_request_test.rb
index 69ed20182a8..db1381d979f 100644
--- a/ruby/example_code/s3/auth_session_token_request_test.rb
+++ b/ruby/example_code/s3/auth_session_token_request_test.rb
@@ -125,7 +125,7 @@ def get_credentials(sts_client, role_arn, role_session_name, duration_seconds)
# @return [Boolean] true if the bucket exists; otherwise, false.
# @example
# s3_client = Aws::S3::Client.new(region: 'us-west-2')
-# exit 1 unless bucket_exists?(s3_client, 'doc-example-bucket')
+# exit 1 unless bucket_exists?(s3_client, 'amzn-s3-demo-bucket')
def bucket_exists?(s3_client, bucket_name)
response = s3_client.list_buckets
response.buckets.each do |bucket|
@@ -143,7 +143,7 @@ def bucket_exists?(s3_client, bucket_name)
# @return [Boolean] true if the objects were listed; otherwise, false.
# @example
# s3_client = Aws::S3::Client.new(region: 'us-west-2')
-# exit 1 unless list_objects_in_bucket?(s3_client, 'doc-example-bucket')
+# exit 1 unless list_objects_in_bucket?(s3_client, 'amzn-s3-demo-bucket')
def list_objects_in_bucket?(s3_client, bucket_name)
puts "Accessing the contents of the bucket named '#{bucket_name}'..."
response = s3_client.list_objects_v2(
@@ -177,7 +177,7 @@ def run_me
role_session_name = "ReadAmazonS3Bucket"
duration_seconds = 3600
sts_client = Aws::STS::Client.new(region: region)
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
puts "Getting or creating user '#{user_name}'..."
diff --git a/ruby/example_code/s3/bucket_cors.rb b/ruby/example_code/s3/bucket_cors.rb
index 1ae92977402..f26bec8ead8 100644
--- a/ruby/example_code/s3/bucket_cors.rb
+++ b/ruby/example_code/s3/bucket_cors.rb
@@ -75,7 +75,7 @@ def delete_cors
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
allowed_methods = %w[GET PUT]
allowed_origins = %w[http://www.example.com]
diff --git a/ruby/example_code/s3/bucket_create.rb b/ruby/example_code/s3/bucket_create.rb
index e8bc41cc7e1..4201f1aeae7 100644
--- a/ruby/example_code/s3/bucket_create.rb
+++ b/ruby/example_code/s3/bucket_create.rb
@@ -47,7 +47,7 @@ def location
# Example usage:
def run_demo
region = "us-west-2"
- wrapper = BucketCreateWrapper.new(Aws::S3::Bucket.new("doc-example-bucket-#{Random.uuid}"))
+ wrapper = BucketCreateWrapper.new(Aws::S3::Bucket.new("amzn-s3-demo-bucket-#{Random.uuid}"))
return unless wrapper.create?(region)
puts "Created bucket #{wrapper.bucket.name}."
diff --git a/ruby/example_code/s3/bucket_list_objects.rb b/ruby/example_code/s3/bucket_list_objects.rb
index 20861b41910..e3ffab553ea 100644
--- a/ruby/example_code/s3/bucket_list_objects.rb
+++ b/ruby/example_code/s3/bucket_list_objects.rb
@@ -38,7 +38,7 @@ def list_objects(max_objects)
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
wrapper = BucketListObjectsWrapper.new(Aws::S3::Bucket.new(bucket_name))
count = wrapper.list_objects(25)
diff --git a/ruby/example_code/s3/bucket_policy.rb b/ruby/example_code/s3/bucket_policy.rb
index 801a84c70f9..ab4adc9d85c 100644
--- a/ruby/example_code/s3/bucket_policy.rb
+++ b/ruby/example_code/s3/bucket_policy.rb
@@ -60,7 +60,7 @@ def delete_policy
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
policy_user = "arn:aws:iam::111122223333:user/Martha"
policy = {
'Version': "2012-10-17",
diff --git a/ruby/example_code/s3/bucket_put_encryption.rb b/ruby/example_code/s3/bucket_put_encryption.rb
index 4d9a821b7ed..3f172f54d5c 100644
--- a/ruby/example_code/s3/bucket_put_encryption.rb
+++ b/ruby/example_code/s3/bucket_put_encryption.rb
@@ -41,7 +41,7 @@ def set_encryption(bucket_name)
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
wrapper = BucketEncryptionWrapper.new(Aws::S3::Client.new)
return unless wrapper.set_encryption(bucket_name)
diff --git a/ruby/example_code/s3/bucket_put_website.rb b/ruby/example_code/s3/bucket_put_website.rb
index ba9127cc429..8254b9dcc32 100644
--- a/ruby/example_code/s3/bucket_put_website.rb
+++ b/ruby/example_code/s3/bucket_put_website.rb
@@ -38,7 +38,7 @@ def set_website(index_document, error_document)
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
index_document = "index.html"
error_document = "404.html"
diff --git a/ruby/example_code/s3/encryption_v2/s3_add_csaes_encrypt_item.rb b/ruby/example_code/s3/encryption_v2/s3_add_csaes_encrypt_item.rb
index 8fd60bd70c0..e279ecbe616 100644
--- a/ruby/example_code/s3/encryption_v2/s3_add_csaes_encrypt_item.rb
+++ b/ruby/example_code/s3/encryption_v2/s3_add_csaes_encrypt_item.rb
@@ -32,7 +32,7 @@
# )
# if encrypted_object_uploaded?(
# s3_encryption_client,
-# 'doc-example-bucket',
+# 'amzn-s3-demo-bucket',
# 'my-file.txt',
# 'This is the content of my-file.txt.'
# )
@@ -84,7 +84,7 @@ def get_random_aes_256_gcm_key
# Example usage:
def run_me
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-file.txt"
region = "us-west-2"
object_content = File.read(object_key)
diff --git a/ruby/example_code/s3/encryption_v2/s3_add_cskms_encrypt_item.rb b/ruby/example_code/s3/encryption_v2/s3_add_cskms_encrypt_item.rb
index b985b2db92f..5ecac7bc7a6 100644
--- a/ruby/example_code/s3/encryption_v2/s3_add_cskms_encrypt_item.rb
+++ b/ruby/example_code/s3/encryption_v2/s3_add_cskms_encrypt_item.rb
@@ -30,7 +30,7 @@
# )
# if encrypted_object_uploaded?(
# s3_encryption_client,
-# 'doc-example-bucket',
+# 'amzn-s3-demo-bucket',
# 'my-file.txt',
# 'This is the content of my-file.txt.'
# )
@@ -57,7 +57,7 @@ def encrypted_object_uploaded?(
# Example usage:
def run_me
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-file.txt"
region = "us-west-2"
kms_key_id = "9041e78c-7a20-4db3-929e-828abEXAMPLE"
diff --git a/ruby/example_code/s3/encryption_v2/s3_get_csaes_decrypt_item.rb b/ruby/example_code/s3/encryption_v2/s3_get_csaes_decrypt_item.rb
index a0e896764f7..90db79f7955 100644
--- a/ruby/example_code/s3/encryption_v2/s3_get_csaes_decrypt_item.rb
+++ b/ruby/example_code/s3/encryption_v2/s3_get_csaes_decrypt_item.rb
@@ -32,7 +32,7 @@
# )
# puts get_decrypted_object_content(
# s3_encryption_client,
-# 'doc-example-bucket',
+# 'amzn-s3-demo-bucket',
# 'my-file.txt'
# )
def get_decrypted_object_content(
@@ -54,7 +54,7 @@ def get_decrypted_object_content(
# Example usage:
def run_me
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-file.txt"
region = "us-west-2"
diff --git a/ruby/example_code/s3/encryption_v2/s3_get_cskms_decrypt_item.rb b/ruby/example_code/s3/encryption_v2/s3_get_cskms_decrypt_item.rb
index 338a7f72d7f..47c72257250 100644
--- a/ruby/example_code/s3/encryption_v2/s3_get_cskms_decrypt_item.rb
+++ b/ruby/example_code/s3/encryption_v2/s3_get_cskms_decrypt_item.rb
@@ -31,7 +31,7 @@
# )
# puts get_decrypted_object_content(
# s3_encryption_client,
-# 'doc-example-bucket',
+# 'amzn-s3-demo-bucket',
# 'my-file.txt'
# )
def get_decrypted_object_content(
@@ -54,7 +54,7 @@ def get_decrypted_object_content(
# Example usage:
def run_me
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-file.txt"
region = "us-west-2"
kms_key_id = "9041e78c-7a20-4db3-929e-828abEXAMPLE"
diff --git a/ruby/example_code/s3/encryption_v2/s3_ruby_example_add_cspk_item.rb b/ruby/example_code/s3/encryption_v2/s3_ruby_example_add_cspk_item.rb
index b20d794b3fc..7640db5174d 100644
--- a/ruby/example_code/s3/encryption_v2/s3_ruby_example_add_cspk_item.rb
+++ b/ruby/example_code/s3/encryption_v2/s3_ruby_example_add_cspk_item.rb
@@ -29,7 +29,7 @@
# security_profile: :v2,
# region: 'us-west-2'
# ),
-# 'doc-example-bucket',
+# 'amzn-s3-demo-bucket',
# 'my-file.txt',
# 'This is the content of my-file.txt.'
# )
@@ -55,7 +55,7 @@ def object_uploaded_with_public_key_encryption?(
# Example usage:
def run_me
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-file.txt"
object_content = "This is the content of my-file.txt."
region = "us-west-2"
diff --git a/ruby/example_code/s3/encryption_v2/s3_ruby_example_get_cspk_item.rb b/ruby/example_code/s3/encryption_v2/s3_ruby_example_get_cspk_item.rb
index 2100db7b445..06837bc58c5 100644
--- a/ruby/example_code/s3/encryption_v2/s3_ruby_example_get_cspk_item.rb
+++ b/ruby/example_code/s3/encryption_v2/s3_ruby_example_get_cspk_item.rb
@@ -32,7 +32,7 @@
# security_profile: :v2,
# region: 'us-west-2'
# ),
-# 'doc-example-bucket',
+# 'amzn-s3-demo-bucket',
# 'my-file.txt'
# )
def download_object_with_private_key_encryption(
@@ -51,7 +51,7 @@ def download_object_with_private_key_encryption(
# Example usage:
def run_me
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-file.txt"
region = "us-west-2"
private_key_file = "my-private-key.pem"
diff --git a/ruby/example_code/s3/encryption_v2/spec/test_s3_add_csaes_encrypt_item.rb b/ruby/example_code/s3/encryption_v2/spec/test_s3_add_csaes_encrypt_item.rb
index dfbd2c3ff0f..b5a20cc6405 100644
--- a/ruby/example_code/s3/encryption_v2/spec/test_s3_add_csaes_encrypt_item.rb
+++ b/ruby/example_code/s3/encryption_v2/spec/test_s3_add_csaes_encrypt_item.rb
@@ -4,7 +4,7 @@
require_relative "../s3_add_csaes_encrypt_item"
describe "#encrypted_object_uploaded?" do
- let(:bucket_name) { "doc-example-bucket" }
+ let(:bucket_name) { "amzn-s3-demo-bucket" }
let(:object_key) { "my-file.txt" }
let(:object_content) { "This is the content of my-file.txt." }
let(:encryption_key) { get_random_aes_256_gcm_key }
diff --git a/ruby/example_code/s3/encryption_v2/spec/test_s3_add_cskms_encrypt_item.rb b/ruby/example_code/s3/encryption_v2/spec/test_s3_add_cskms_encrypt_item.rb
index 4044651726c..0d4fcad0035 100644
--- a/ruby/example_code/s3/encryption_v2/spec/test_s3_add_cskms_encrypt_item.rb
+++ b/ruby/example_code/s3/encryption_v2/spec/test_s3_add_cskms_encrypt_item.rb
@@ -4,7 +4,7 @@
require_relative "../s3_add_cskms_encrypt_item"
describe "#encrypted_object_uploaded?" do
- let(:bucket_name) { "doc-example-bucket" }
+ let(:bucket_name) { "amzn-s3-demo-bucket" }
let(:object_key) { "my-file.txt" }
let(:object_content) { "This is the content of my-file.txt." }
let(:kms_key_id) { "9041e78c-7a20-4db3-929e-828abEXAMPLE" }
diff --git a/ruby/example_code/s3/encryption_v2/spec/test_s3_get_csaes_decrypt_item.rb b/ruby/example_code/s3/encryption_v2/spec/test_s3_get_csaes_decrypt_item.rb
index d08add94428..0f8fe481f89 100644
--- a/ruby/example_code/s3/encryption_v2/spec/test_s3_get_csaes_decrypt_item.rb
+++ b/ruby/example_code/s3/encryption_v2/spec/test_s3_get_csaes_decrypt_item.rb
@@ -40,7 +40,7 @@ def stub_get(s3_client, data, stub_auth_tag)
)
end
- let(:bucket_name) { "doc-example-bucket" }
+ let(:bucket_name) { "amzn-s3-demo-bucket" }
let(:object_key) { "my-file.txt" }
let(:object_content) { "This is the content of my-file.txt." }
let(:encryption_key_string) { "XSiKrmzhtDKR9tTwJRSLjgwLhiMA82TC2z3GEXAMPLE=" }
diff --git a/ruby/example_code/s3/encryption_v2/spec/test_s3_get_cskms_decrypt_item.rb b/ruby/example_code/s3/encryption_v2/spec/test_s3_get_cskms_decrypt_item.rb
index ee1a60fc85b..5d986ed1654 100644
--- a/ruby/example_code/s3/encryption_v2/spec/test_s3_get_cskms_decrypt_item.rb
+++ b/ruby/example_code/s3/encryption_v2/spec/test_s3_get_cskms_decrypt_item.rb
@@ -57,7 +57,7 @@ def stub_decrypt(kms_client, opts)
)
end
- let(:bucket_name) { "doc-example-bucket" }
+ let(:bucket_name) { "amzn-s3-demo-bucket" }
let(:object_key) { "my-file.txt" }
let(:object_content) { "This is the content of my-file.txt." }
let(:kms_key_id) { "9041e78c-7a20-4db3-929e-828abEXAMPLE" }
diff --git a/ruby/example_code/s3/encryption_v2/spec/test_s3_ruby_example_add_cspk_item.rb b/ruby/example_code/s3/encryption_v2/spec/test_s3_ruby_example_add_cspk_item.rb
index 2f236c3951e..bdc76297a03 100644
--- a/ruby/example_code/s3/encryption_v2/spec/test_s3_ruby_example_add_cspk_item.rb
+++ b/ruby/example_code/s3/encryption_v2/spec/test_s3_ruby_example_add_cspk_item.rb
@@ -4,7 +4,7 @@
require_relative "../s3-ruby-example-add-cspk-item"
describe "#object_uploaded_with_public_key_encryption?" do
- let(:bucket_name) { "doc-example-bucket" }
+ let(:bucket_name) { "amzn-s3-demo-bucket" }
let(:object_key) { "my-file.txt" }
let(:object_content) { "This is the content of my-file.txt." }
# Note that Aws::S3::EncryptionV2::Client is a wrapper around
diff --git a/ruby/example_code/s3/encryption_v2/spec/test_s3_ruby_example_get_cspk_item.rb b/ruby/example_code/s3/encryption_v2/spec/test_s3_ruby_example_get_cspk_item.rb
index e1cec1c6e54..355141843f0 100644
--- a/ruby/example_code/s3/encryption_v2/spec/test_s3_ruby_example_get_cspk_item.rb
+++ b/ruby/example_code/s3/encryption_v2/spec/test_s3_ruby_example_get_cspk_item.rb
@@ -40,7 +40,7 @@ def stub_get(s3_client, data, stub_auth_tag)
)
end
- let(:bucket_name) { "doc-example-bucket" }
+ let(:bucket_name) { "amzn-s3-demo-bucket" }
let(:object_key) { "my-file.txt" }
let(:object_content) { "This is the content of my-file.txt." }
# Note that Aws::S3::EncryptionV2::Client is a wrapper around
diff --git a/ruby/example_code/s3/object_copy.rb b/ruby/example_code/s3/object_copy.rb
index d55e48c57e5..c3e415708b8 100644
--- a/ruby/example_code/s3/object_copy.rb
+++ b/ruby/example_code/s3/object_copy.rb
@@ -33,9 +33,9 @@ def copy_object(target_bucket, target_object_key)
# Example usage:
def run_demo
- source_bucket_name = "doc-example-bucket1"
+ source_bucket_name = "amzn-s3-demo-bucket1"
source_key = "my-source-file.txt"
- target_bucket_name = "doc-example-bucket2"
+ target_bucket_name = "amzn-s3-demo-bucket2"
target_key = "my-target-file.txt"
source_bucket = Aws::S3::Bucket.new(source_bucket_name)
diff --git a/ruby/example_code/s3/object_copy_encrypt.rb b/ruby/example_code/s3/object_copy_encrypt.rb
index a5c21927ac5..680c1552e7e 100644
--- a/ruby/example_code/s3/object_copy_encrypt.rb
+++ b/ruby/example_code/s3/object_copy_encrypt.rb
@@ -34,9 +34,9 @@ def copy_object(target_bucket, target_object_key, encryption)
# Example usage:
def run_demo
- source_bucket_name = "doc-example-bucket1"
+ source_bucket_name = "amzn-s3-demo-bucket1"
source_key = "my-source-file.txt"
- target_bucket_name = "doc-example-bucket2"
+ target_bucket_name = "amzn-s3-demo-bucket2"
target_key = "my-target-file.txt"
target_encryption = "AES256"
diff --git a/ruby/example_code/s3/object_exists.rb b/ruby/example_code/s3/object_exists.rb
index 01d896379f5..05307f9fb66 100644
--- a/ruby/example_code/s3/object_exists.rb
+++ b/ruby/example_code/s3/object_exists.rb
@@ -30,7 +30,7 @@ def exists?
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-object.txt"
wrapper = ObjectExistsWrapper.new(Aws::S3::Object.new(bucket_name, object_key))
diff --git a/ruby/example_code/s3/object_get.rb b/ruby/example_code/s3/object_get.rb
index 7fec0e54c68..c8a443b5194 100644
--- a/ruby/example_code/s3/object_get.rb
+++ b/ruby/example_code/s3/object_get.rb
@@ -30,7 +30,7 @@ def get_object(target_path)
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-object.txt"
target_path = "my-object-as-file.txt"
diff --git a/ruby/example_code/s3/object_get_encryption.rb b/ruby/example_code/s3/object_get_encryption.rb
index b100527ac68..48a0fde0cac 100644
--- a/ruby/example_code/s3/object_get_encryption.rb
+++ b/ruby/example_code/s3/object_get_encryption.rb
@@ -30,7 +30,7 @@ def get_object
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-object.txt"
wrapper = ObjectGetEncryptionWrapper.new(Aws::S3::Object.new(bucket_name, object_key))
diff --git a/ruby/example_code/s3/object_presigned_url_upload.rb b/ruby/example_code/s3/object_presigned_url_upload.rb
index 16fe6b886b7..84095125feb 100644
--- a/ruby/example_code/s3/object_presigned_url_upload.rb
+++ b/ruby/example_code/s3/object_presigned_url_upload.rb
@@ -24,7 +24,7 @@ def get_presigned_url(bucket, object_key)
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-file.txt"
object_content = "This is the content of my-file.txt."
diff --git a/ruby/example_code/s3/object_put.rb b/ruby/example_code/s3/object_put.rb
index 6d396df3839..63dd10010fd 100644
--- a/ruby/example_code/s3/object_put.rb
+++ b/ruby/example_code/s3/object_put.rb
@@ -30,7 +30,7 @@ def put_object(source_file_path)
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-object-key"
file_path = "my-local-file.txt"
diff --git a/ruby/example_code/s3/object_put_sse.rb b/ruby/example_code/s3/object_put_sse.rb
index e3164d5061b..a90b499d583 100644
--- a/ruby/example_code/s3/object_put_sse.rb
+++ b/ruby/example_code/s3/object_put_sse.rb
@@ -30,7 +30,7 @@ def put_object_encrypted(object_content, encryption)
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-encrypted-content"
object_content = "This is my super-secret content."
encryption = "AES256"
diff --git a/ruby/example_code/s3/object_upload_file.rb b/ruby/example_code/s3/object_upload_file.rb
index 7069e5f66a9..ecd2be8f949 100644
--- a/ruby/example_code/s3/object_upload_file.rb
+++ b/ruby/example_code/s3/object_upload_file.rb
@@ -32,7 +32,7 @@ def upload_file(file_path)
# Example usage:
def run_demo
- bucket_name = "doc-example-bucket"
+ bucket_name = "amzn-s3-demo-bucket"
object_key = "my-uploaded-file"
file_path = "object_upload_file.rb"
diff --git a/ruby/example_code/s3/scenario_getting_started.rb b/ruby/example_code/s3/scenario_getting_started.rb
index b68d81c63fa..ead5032f888 100644
--- a/ruby/example_code/s3/scenario_getting_started.rb
+++ b/ruby/example_code/s3/scenario_getting_started.rb
@@ -29,7 +29,7 @@ def initialize(s3_resource)
# @return [Aws::S3::Bucket] The newly created bucket.
def create_bucket
bucket = @s3_resource.create_bucket(
- bucket: "doc-example-bucket-#{Random.uuid}",
+ bucket: "amzn-s3-demo-bucket-#{Random.uuid}",
create_bucket_configuration: {
location_constraint: "us-east-1" # Note: only certain regions permitted
}
diff --git a/ruby/example_code/s3/spec/bucket_create_spec.rb b/ruby/example_code/s3/spec/bucket_create_spec.rb
index 587f55c57a0..88502b54534 100644
--- a/ruby/example_code/s3/spec/bucket_create_spec.rb
+++ b/ruby/example_code/s3/spec/bucket_create_spec.rb
@@ -5,7 +5,7 @@
require_relative "../bucket_create"
describe BucketCreateWrapper do
- let(:bucket_name) { "doc-example-bucket" }
+ let(:bucket_name) { "amzn-s3-demo-bucket" }
let(:bucket) { Aws::S3::Bucket.new(bucket_name, stub_responses: true) }
let(:wrapper) { BucketCreateWrapper.new(bucket) }
diff --git a/ruby/example_code/sns/tests/test_create_bucket_snippet.rb b/ruby/example_code/sns/tests/test_create_bucket_snippet.rb
index 65572eb9526..9fd4ccbc3dd 100644
--- a/ruby/example_code/sns/tests/test_create_bucket_snippet.rb
+++ b/ruby/example_code/sns/tests/test_create_bucket_snippet.rb
@@ -5,7 +5,7 @@
describe "#bucket_created?" do
let(:s3_client) { Aws::S3::Client.new(stub_responses: true) }
- let(:bucket_name) { "doc-example-bucket" }
+ let(:bucket_name) { "amzn-s3-demo-bucket" }
it "confirms the bucket was created" do
bucket_data = s3_client.stub_data(
diff --git a/rustv1/examples/glue/src/prepare.rs b/rustv1/examples/glue/src/prepare.rs
index 6bf47c12f42..10cc6b6f205 100644
--- a/rustv1/examples/glue/src/prepare.rs
+++ b/rustv1/examples/glue/src/prepare.rs
@@ -243,7 +243,7 @@ impl GlueScenario {
Ok(())
}
- // Upload Python ETL script to the user's Amazon Simple Storage Service (Amazon S3) bucket. It looks something like this: s3://doc-example-bucket-123456/flight_etl_job_script.py.
+ // Upload Python ETL script to the user's Amazon Simple Storage Service (Amazon S3) bucket. It looks something like this: s3://amzn-s3-demo-bucket-123456/flight_etl_job_script.py.
// Create a job, pass it the AWS Identity and Access Management (IAM) role and the URL to the uploaded script.
#[instrument(skip(self))]
pub async fn prepare_job(self: &GlueScenario) -> Result {
diff --git a/rustv1/examples/s3/src/bin/s3-getting-started.rs b/rustv1/examples/s3/src/bin/s3-getting-started.rs
index 0f7c756998f..caee38ea25e 100644
--- a/rustv1/examples/s3/src/bin/s3-getting-started.rs
+++ b/rustv1/examples/s3/src/bin/s3-getting-started.rs
@@ -23,7 +23,7 @@ async fn main() -> Result<(), S3ExampleError> {
let region = region_provider.region().await.unwrap();
let shared_config = aws_config::from_env().region(region_provider).load().await;
let client = Client::new(&shared_config);
- let bucket_name = format!("doc-example-bucket-{}", Uuid::new_v4());
+ let bucket_name = format!("amzn-s3-demo-bucket-{}", Uuid::new_v4());
let file_name = "s3/testfile.txt".to_string();
let key = "test file key name".to_string();
let target_key = "target_key".to_string();
diff --git a/rustv1/examples/s3/src/bin/s3-multipart-upload.rs b/rustv1/examples/s3/src/bin/s3-multipart-upload.rs
index ef6b97aacb2..2decd5ec60b 100644
--- a/rustv1/examples/s3/src/bin/s3-multipart-upload.rs
+++ b/rustv1/examples/s3/src/bin/s3-multipart-upload.rs
@@ -39,7 +39,7 @@ async fn run_example() -> Result<(), S3ExampleError> {
let shared_config = aws_config::load_from_env().await;
let client = S3Client::new(&shared_config);
- let bucket_name = format!("doc-example-bucket-{}", Uuid::new_v4());
+ let bucket_name = format!("amzn-s3-demo-bucket-{}", Uuid::new_v4());
let region_provider = RegionProviderChain::first_try(Region::new("us-west-2"));
let region = region_provider.region().await.unwrap();
s3_code_examples::create_bucket(&client, &bucket_name, &region).await?;
diff --git a/rustv1/examples/s3/tests/test-s3-getting-started.rs b/rustv1/examples/s3/tests/test-s3-getting-started.rs
index 879ab73d9e5..a3964e2f0a8 100644
--- a/rustv1/examples/s3/tests/test-s3-getting-started.rs
+++ b/rustv1/examples/s3/tests/test-s3-getting-started.rs
@@ -40,7 +40,7 @@ async fn setup() -> (Region, Client, String, String, String, String) {
let shared_config = aws_config::from_env().region(region_provider).load().await;
let client = Client::new(&shared_config);
- let bucket_name = format!("{}{}", "doc-example-bucket-", Uuid::new_v4());
+ let bucket_name = format!("{}{}", "amzn-s3-demo-bucket-", Uuid::new_v4());
let file_name = "../s3/testfile.txt".to_string();
let key = "test file key name".to_string();
let target_key = "target_key".to_string();