Skip to content

Commit 048bf54

Browse files
committed
Update Readme for newer library version
1 parent 2ab7285 commit 048bf54

File tree

1 file changed

+62
-44
lines changed

1 file changed

+62
-44
lines changed

README.md

Lines changed: 62 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ sns-extended-client allows for publishing large messages through SNS via S3. Thi
2121
* use_legacy_attribute -- if `True`, then all published messages use the Legacy reserved message attribute (SQSLargePayloadSize) instead of the current reserved message attribute (ExtendedPayloadSize).
2222
* message_size_threshold -- the threshold for storing the message in the large messages bucket. Cannot be less than `0` or greater than `262144`. Defaults to `262144`.
2323
* always_through_s3 -- if `True`, then all messages will be serialized to S3. Defaults to `False`
24-
* s3 -- the boto3 S3 `resource` object to use to store objects to S3. Use this if you want to control the S3 resource (for example, custom S3 config or credentials). Defaults to `boto3.resource("s3")` on first use if not previously set.
24+
* s3_client -- the boto3 S3 `client` object to use to store objects to S3. Use this if you want to control the S3 client (for example, custom S3 config or credentials). Defaults to `boto3.client("s3")` on first use if not previously set.
2525

2626
## Usage
2727

@@ -108,25 +108,27 @@ platform_endpoint = resource.PlatformEndpoint('endpoint-arn')
108108
platform_endpoint.large_payload_support = 'my-bucket-name'
109109
platform_endpoint.always_through_s3 = True
110110
```
111-
### Setting a custom S3 resource
111+
### Setting a custom S3 config
112112
```python
113113
import boto3
114114
from botocore.config import Config
115115
import sns_extended_client
116116

117-
# Low level client
118-
sns = boto3.client('sns')
119-
sns.large_payload_support = 'my-bucket-name'
120-
sns.s3 = boto3.resource(
121-
's3',
122-
config=Config(
123-
signature_version='s3v4',
117+
# Define Configuration for boto3's S3 Client
118+
# NOTE - boto3 versions 1.36.0 through 1.36.6 will throw an error if accelerate_endpoint is enabled.
119+
s3_client_config = Config(
120+
region_name = 'us-east-1',
121+
signature_version = 's3v4',
124122
s3={
125-
"use_accelerate_endpoint": True
123+
"use_accelerate_endpoint": True
126124
}
127-
)
128125
)
129126

127+
# Low level client
128+
sns = boto3.client('sns')
129+
sns.large_payload_support = 'my-bucket-name'
130+
sns.s3_client = boto3.client("s3", config=s3_client_config)
131+
130132
# boto SNS.Topic resource
131133
resource = boto3.resource('sns')
132134
topic = resource.Topic('topic-arn')
@@ -135,30 +137,14 @@ topic = resource.Topic('topic-arn')
135137
topic = resource.topic(Name='topic-name')
136138

137139
topic.large_payload_support = 'my-bucket-name'
138-
topic.s3 = boto3.resource(
139-
's3',
140-
config=Config(
141-
signature_version='s3v4',
142-
s3={
143-
"use_accelerate_endpoint": True
144-
}
145-
)
146-
)
140+
topic.s3_client = boto3.client("s3", config=s3_client_config)
147141

148142
# boto SNS.PlatformEndpoint resource
149143
resource = boto3.resource('sns')
150144
platform_endpoint = resource.PlatformEndpoint('endpoint-arn')
151145

152146
platform_endpoint.large_payload_support = 'my-bucket-name'
153-
platform_endpoint.s3 = boto3.resource(
154-
's3',
155-
config=Config(
156-
signature_version='s3v4',
157-
s3={
158-
"use_accelerate_endpoint": True
159-
}
160-
)
161-
)
147+
platform_endpoint.s3_client = boto3.client("s3", config=s3_client_config)
162148
```
163149

164150
### Setting a custom S3 Key
@@ -251,18 +237,17 @@ def allow_sns_to_write_to_sqs(topicarn, queuearn):
251237

252238
return policy_document
253239

254-
def get_msg_from_s3(body):
240+
def get_msg_from_s3(body, sns_extended_client):
255241
"""Handy Helper to fetch message from S3"""
256242
json_msg = loads(body)
257-
s3_client = boto3.client("s3",region_name="us-east-1")
258-
s3_object = s3_client.get_object(
243+
s3_object = sns_extended_client.s3_client.get_object(
259244
Bucket=json_msg[1].get("s3BucketName"), Key=json_msg[1].get("s3Key")
260245
)
261246
msg = s3_object.get("Body").read().decode()
262247
return msg
263248

264249

265-
def fetch_and_print_from_sqs(sqs, queue_url):
250+
def fetch_and_print_from_sqs(sqs, queue_url, sns_extended_client):
266251
sqs_msg = sqs.receive_message(
267252
QueueUrl=queue_url,
268253
AttributeNames=['All'],
@@ -274,7 +259,7 @@ def fetch_and_print_from_sqs(sqs, queue_url):
274259

275260
message_body = sqs_msg.get("Body")
276261
print("Published Message: {}".format(message_body))
277-
print("Message Stored in S3 Bucket is: {}\n".format(get_msg_from_s3(message_body)))
262+
print("Message Stored in S3 Bucket is: {}\n".format(get_msg_from_s3(message_body, sns_extended_client)))
278263

279264
# Delete the Processed Message
280265
sqs.delete_message(
@@ -312,14 +297,17 @@ Endpoint=sqs_queue_arn
312297

313298
sns_extended_client.large_payload_support = s3_extended_payload_bucket
314299

300+
# Change default s3_client attribute of sns_extended_client to use 'us-east-1' region
301+
sns_extended_client.s3_client = boto3.client("s3", region_name="us-east-1")
302+
315303

316304
# Below is the example that all the messages will be sent to the S3 bucket
317305
sns_extended_client.always_through_s3 = True
318306
sns_extended_client.publish(
319307
TopicArn=sns_topic_arn, Message="This message should be published to S3"
320308
)
321309
print("\n\nPublished using SNS extended client:")
322-
fetch_and_print_from_sqs(sqs, demo_queue_url) # Prints message stored in s3
310+
fetch_and_print_from_sqs(sqs, demo_queue_url, sns_extended_client) # Prints message stored in s3
323311

324312
# Below is the example that all the messages larger than 32 bytes will be sent to the S3 bucket
325313
print("\nUsing decreased message size threshold:")
@@ -331,16 +319,20 @@ sns_extended_client.publish(
331319
Message="This message should be published to S3 as it exceeds the limit of the 32 bytes",
332320
)
333321

334-
fetch_and_print_from_sqs(sqs, demo_queue_url) # Prints message stored in s3
322+
fetch_and_print_from_sqs(sqs, demo_queue_url, sns_extended_client) # Prints message stored in s3
335323

336324

337-
# # Below is the example to publish message using the SNS.Topic resource
325+
# Below is the example to publish message using the SNS.Topic resource
338326
sns_extended_client_resource = SNSExtendedClientSession().resource(
339327
"sns", region_name="us-east-1"
340328
)
341329

342330
topic = sns_extended_client_resource.Topic(sns_topic_arn)
343331
topic.large_payload_support = s3_extended_payload_bucket
332+
333+
# Change default s3_client attribute of topic to use 'us-east-1' region
334+
topic.s3_client = boto3.client("s3", region_name="us-east-1")
335+
344336
topic.always_through_s3 = True
345337
# Can Set custom S3 Keys to be used to store objects in S3
346338
topic.publish(
@@ -353,24 +345,51 @@ topic.publish(
353345
},
354346
)
355347
print("\nPublished using Topic Resource:")
356-
fetch_and_print_from_sqs(sqs, demo_queue_url)
348+
fetch_and_print_from_sqs(sqs, demo_queue_url, topic)
349+
350+
# Below is the example to publish message using the SNS.PlatformEndpoint resource
351+
sns_extended_client_resource = SNSExtendedClientSession().resource(
352+
"sns", region_name="us-east-1"
353+
)
354+
355+
platform_endpoint = sns_extended_client_resource.PlatformEndpoint(sns_topic_arn)
356+
platform_endpoint.large_payload_support = s3_extended_payload_bucket
357+
358+
# Change default s3_client attribute of platform_endpoint to use 'us-east-1' region
359+
platform_endpoint.s3_client = boto3.client("s3", region_name="us-east-1")
360+
361+
platform_endpoint.always_through_s3 = True
362+
# Can Set custom S3 Keys to be used to store objects in S3
363+
platform_endpoint.publish(
364+
Message="This message should be published to S3 using the PlatformEndpoint resource",
365+
MessageAttributes={
366+
"S3Key": {
367+
"DataType": "String",
368+
"StringValue": "247c11c4-a22c-42e4-a6a2-9b5af5b76587",
369+
}
370+
},
371+
)
372+
print("\nPublished using PlatformEndpoint Resource:")
373+
fetch_and_print_from_sqs(sqs, demo_queue_url, platform_endpoint)
357374
```
358375

359376
PRODUCED OUTPUT:
360377
```
361378
Published using SNS extended client:
362-
Published Message: ["software.amazon.payloadoffloading.PayloadS3Pointer", {"s3BucketName": "extended-client-bucket-store", "s3Key": "465d51ea-2c85-4cf8-9ff7-f0a20636ac54"}]
379+
Published Message: ["software.amazon.payloadoffloading.PayloadS3Pointer", {"s3BucketName": "extended-client-bucket-store", "s3Key": "10999f58-c5ae-4d68-9208-f70475e0113d"}]
363380
Message Stored in S3 Bucket is: This message should be published to S3
364381
365-
366382
Using decreased message size threshold:
367-
Published Message: ["software.amazon.payloadoffloading.PayloadS3Pointer", {"s3BucketName": "extended-client-bucket-store", "s3Key": "4e32bc6c-e67e-4e09-982b-66dfbe0c588a"}]
383+
Published Message: ["software.amazon.payloadoffloading.PayloadS3Pointer", {"s3BucketName": "extended-client-bucket-store", "s3Key": "2c5cb2c7-e649-492b-85fb-fa9923cb02bf"}]
368384
Message Stored in S3 Bucket is: This message should be published to S3 as it exceeds the limit of the 32 bytes
369385
370-
371386
Published using Topic Resource:
372387
Published Message: ["software.amazon.payloadoffloading.PayloadS3Pointer", {"s3BucketName": "extended-client-bucket-store", "s3Key": "347c11c4-a22c-42e4-a6a2-9b5af5b76587"}]
373388
Message Stored in S3 Bucket is: This message should be published to S3 using the topic resource
389+
390+
Published using PlatformEndpoint Resource:
391+
Published Message: ["software.amazon.payloadoffloading.PayloadS3Pointer", {"s3BucketName": "extended-client-bucket-store", "s3Key": "247c11c4-a22c-42e4-a6a2-9b5af5b76587"}]
392+
Message Stored in S3 Bucket is: This message should be published to S3 using the PlatformEndpoint resource
374393
```
375394

376395
## DEVELOPMENT
@@ -388,5 +407,4 @@ See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more inform
388407

389408
## License
390409

391-
This project is licensed under the Apache-2.0 License.
392-
410+
This project is licensed under the Apache-2.0 License.

0 commit comments

Comments
 (0)