Skip to content

Commit ea42241

Browse files
committed
more pylinting and refactor out common code to a new lambda layer to be included in use by all functions
1 parent fb0acdf commit ea42241

File tree

4 files changed

+120
-0
lines changed

4 files changed

+120
-0
lines changed

src/layer_utils/__init__.py

Whitespace-only changes.

src/layer_utils/aws_utils.py

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
"""
2+
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3+
# SPDX-License-Identifier: MIT-0
4+
5+
AWS related functions that multiple lambda functions use, here to reduce redundancy
6+
"""
7+
from io import BytesIO
8+
from json import dumps
9+
from botocore.exceptions import ClientError
10+
from boto3 import resource, client
11+
12+
# Given a bucket and object, verify its existence and return the resource.
def s3_object_stream(bucket_name: str, object_name: str) -> BytesIO:
    """Retrieve an s3 object and read it into an in-memory stream.

    :param bucket_name: name of the S3 bucket holding the object
    :param object_name: key of the object within the bucket
    :returns: BytesIO containing the full object content
    :raises ClientError: propagated from S3 when the bucket or key is missing
    """
    s3res = resource('s3')
    res = s3res.Object(bucket_name=bucket_name, key=object_name)
    fs = BytesIO()
    # download_fileobj raises botocore ClientError on a missing bucket/key;
    # the previous try/except merely re-raised it (pylint W0706), so simply
    # let it propagate to the caller.
    res.download_fileobj(fs)
    return fs
24+
# Given a bucket name and object name, return bytes representing
# the object content.
def s3_filebuf_bytes(bucket_name: str, object_name: str):
    """Return the complete content of an s3 object as a bytes value."""
    stream = s3_object_stream(bucket_name=bucket_name,
                              object_name=object_name)
    return stream.getvalue()
31+
32+
def queue_manifest_certificate(identity, certificate, queue_url):
    """Send the thing name and certificate to sqs queue"""
    # Serialize the identity/certificate pair as a JSON message body.
    message_body = dumps({
        'thing': identity,
        'certificate': certificate
    })
    sqs_client = client("sqs")
    sqs_client.send_message(QueueUrl=queue_url, MessageBody=message_body)

src/layer_utils/cert_utils.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
"""
2+
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3+
# SPDX-License-Identifier: MIT-0
4+
5+
Certificate/manifest related functions that multiple lambda functions use,
6+
here to reduce redundancy
7+
"""
8+
from base64 import b64encode
9+
from cryptography import x509
10+
from cryptography.hazmat.backends import default_backend
11+
from cryptography.hazmat.primitives import serialization
12+
13+
def format_certificate(cert_string):
    """Encode certificate so that it can safely travel via sqs.

    :param cert_string: PEM-encoded certificate text
    :returns: base64 of the normalized PEM block, as an ascii str
    :raises ValueError: if the input is not a parseable PEM certificate
    """
    cert_encoded = cert_string.encode('ascii')

    # Round-trip through the x509 parser to validate and normalize the PEM.
    pem_obj = x509.load_pem_x509_certificate(cert_encoded,
                                             backend=default_backend())
    block = pem_obj.public_bytes(encoding=serialization.Encoding.PEM).decode('ascii')
    # b64encode returns bytes; the original str() call embedded the "b'...'"
    # repr wrapper in the payload. Decode explicitly to get a clean base64
    # string that a consumer can b64decode directly.
    return b64encode(block.encode('ascii')).decode('ascii')

test/unit/src/test_aws_utils.py

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
"""
2+
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3+
# SPDX-License-Identifier: MIT-0
4+
5+
Utility lambda layer unit testing
6+
"""
7+
import os
8+
import io
9+
from unittest import TestCase
10+
from unittest.mock import MagicMock, patch
11+
import pytest
12+
from moto import mock_aws, settings
13+
from botocore.exceptions import ClientError
14+
from boto3 import resource, client
15+
16+
from aws_utils import s3_object_stream, s3_filebuf_bytes
17+
from .model_provider_espressif import LambdaS3Class
18+
19+
@mock_aws(config={
    "core": {
        "mock_credentials": True,
        "reset_boto3_session": False,
        "service_whitelist": None,
    },
    'iot': {'use_valid_cert': True}})
class TestProviderEspressif(TestCase):
    """Unit tests for the aws_utils lambda layer helpers"""
    def setUp(self):
        """Create a mocked bucket seeded with the espressif manifest artifact"""
        self.test_s3_bucket_name = "unit_test_s3_bucket"
        self.test_s3_object_content = None
        os.environ["S3_BUCKET_NAME"] = self.test_s3_bucket_name
        s3_client = client('s3', region_name="us-east-1")
        s3_client.create_bucket(Bucket=self.test_s3_bucket_name)
        with open('./test/artifacts/manifest-espressif.csv', 'rb') as data:
            s3_client.put_object(Bucket=self.test_s3_bucket_name,
                                 Key="manifest.csv", Body=data)
        self.test_s3_object_content = s3_client.get_object(
            Bucket=self.test_s3_bucket_name, Key="manifest.csv")['Body']
        # NOTE: the previous version assigned resource("s3") to this name and
        # then immediately overwrote it with the dict — dead code removed.
        mocked_s3_resource = {"resource": resource('s3'),
                              "bucket_name": self.test_s3_bucket_name}
        self.mocked_s3_class = LambdaS3Class(mocked_s3_resource)

    def test_pos_s3_object_resource(self):
        """Basic pos test case for object resource"""
        r = s3_object_stream("unit_test_s3_bucket", "manifest.csv")
        assert isinstance(r, io.BytesIO)

    def test_neg_s3_object_resource(self):
        """Basic neg test case for object resource"""
        with pytest.raises(ClientError) as e:
            # Although this returns a value, no need to define var for it
            s3_object_stream("unit_test_s3_buckets", "manifest")
        errstr = "An error occurred (NoSuchBucket) when calling the " \
                 "HeadObject operation: The specified bucket does not exist"
        assert str(e.value) == errstr

    def test_pos_s3_filebuf_bytes(self):
        """Basic pos test case for byte buffer handling"""
        # The bytes should equal to the object in the bucket
        v = s3_filebuf_bytes("unit_test_s3_bucket", "manifest.csv")
        assert v == self.test_s3_object_content.read()

0 commit comments

Comments
 (0)