Skip to content

Commit a90098f

Browse files
authored
Merge pull request nficano#74 from slapula/s3-deploy
Adding function to deploy using S3 as source
2 parents 7053c05 + 078807d commit a90098f

File tree

4 files changed

+103
-39
lines changed

4 files changed

+103
-39
lines changed

README.rst

Lines changed: 14 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -167,14 +167,20 @@ This would create environment variables in the lambda instance upon deploy. If y
167167

168168
Uploading to S3
169169
===============
170-
You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the `upload` command to send the bundle to an S3 bucket of your choosing.
171-
Before doing this, you will need to set the following variables in `config.yaml`:
172-
```
173-
role: basic_s3_upload
174-
bucket_name: 'example-bucket'
175-
s3_key_prefix: 'path/to/file/'
176-
```
177-
Your role must have `s3:PutObject` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute `lambda upload` to initiate the transfer.
170+
You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the ``upload`` command to send the bundle to an S3 bucket of your choosing.
171+
Before doing this, you will need to set the following variables in ``config.yaml``:
172+
173+
.. code:: yaml
174+
175+
role: basic_s3_upload
176+
bucket_name: 'example-bucket'
177+
s3_key_prefix: 'path/to/file/'
178+
179+
Your role must have ``s3:PutObject`` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute ``lambda upload`` to initiate the transfer.
180+
181+
Deploying via S3
182+
================
183+
You can also choose to use S3 as your source for Lambda deployments. This can be done by issuing ``lambda deploy_s3`` with the same variables/AWS permissions you'd set for executing the ``upload`` command.
178184

179185
Development
180186
===========

aws_lambda/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
__email__ = '[email protected]'
55
__version__ = '2.1.1'
66

7-
from .aws_lambda import deploy, invoke, init, build, upload, cleanup_old_versions
7+
from .aws_lambda import deploy, deploy_s3, invoke, init, build, upload, cleanup_old_versions
88

99
# Set default logging handler to avoid "No handler found" warnings.
1010
import logging

aws_lambda/aws_lambda.py

Lines changed: 79 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,32 @@ def deploy(src, requirements=False, local_package=None):
103103
else:
104104
create_function(cfg, path_to_zip_file)
105105

106+
def deploy_s3(src, requirements=False, local_package=None):
107+
"""Deploys a new function via AWS S3.
108+
109+
:param str src:
110+
The path to your Lambda ready project (folder must contain a valid
111+
config.yaml and handler module (e.g.: service.py)).
112+
:param str local_package:
113+
The path to a local package which should be included in the deploy as
114+
well (and/or is not available on PyPI)
115+
"""
116+
# Load and parse the config file.
117+
path_to_config_file = os.path.join(src, 'config.yaml')
118+
cfg = read(path_to_config_file, loader=yaml.load)
119+
120+
# Copy all the pip dependencies required to run your code into a temporary
121+
# folder then add the handler file in the root of this directory.
122+
# Zip the contents of this folder into a single file and output to the dist
123+
# directory.
124+
path_to_zip_file = build(src, requirements, local_package)
125+
126+
use_s3 = True
127+
s3_file = upload_s3(cfg, path_to_zip_file, use_s3)
128+
if function_exists(cfg, cfg.get('function_name')):
129+
update_function(cfg, path_to_zip_file, use_s3, s3_file)
130+
else:
131+
create_function(cfg, path_to_zip_file, use_s3, s3_file)
106132

107133
def upload(src, requirements=False, local_package=None):
108134
"""Uploads a new function to AWS S3.
@@ -406,7 +432,7 @@ def get_client(client, aws_access_key_id, aws_secret_access_key, region=None):
406432
)
407433

408434

409-
def create_function(cfg, path_to_zip_file):
435+
def create_function(cfg, path_to_zip_file, *use_s3, **s3_file):
410436
"""Register and upload a function to AWS Lambda."""
411437

412438
print('Creating your new Lambda function')
@@ -426,21 +452,41 @@ def create_function(cfg, path_to_zip_file):
426452
)
427453

428454
# Do we prefer development variable over config?
455+
buck_name = (
456+
os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name')
457+
)
429458
func_name = (
430459
os.environ.get('LAMBDA_FUNCTION_NAME') or cfg.get('function_name')
431460
)
432461
print('Creating lambda function with name: {}'.format(func_name))
433-
kwargs = {
434-
'FunctionName': func_name,
435-
'Runtime': cfg.get('runtime', 'python2.7'),
436-
'Role': role,
437-
'Handler': cfg.get('handler'),
438-
'Code': {'ZipFile': byte_stream},
439-
'Description': cfg.get('description'),
440-
'Timeout': cfg.get('timeout', 15),
441-
'MemorySize': cfg.get('memory_size', 512),
442-
'Publish': True,
443-
}
462+
463+
if use_s3 == True:
464+
kwargs = {
465+
'FunctionName': func_name,
466+
'Runtime': cfg.get('runtime', 'python2.7'),
467+
'Role': role,
468+
'Handler': cfg.get('handler'),
469+
'Code': {
470+
'S3Bucket': '{}'.format(buck_name),
471+
'S3Key': '{}'.format(s3_file)
472+
},
473+
'Description': cfg.get('description'),
474+
'Timeout': cfg.get('timeout', 15),
475+
'MemorySize': cfg.get('memory_size', 512),
476+
'Publish': True
477+
}
478+
else:
479+
kwargs = {
480+
'FunctionName': func_name,
481+
'Runtime': cfg.get('runtime', 'python2.7'),
482+
'Role': role,
483+
'Handler': cfg.get('handler'),
484+
'Code': {'ZipFile': byte_stream},
485+
'Description': cfg.get('description'),
486+
'Timeout': cfg.get('timeout', 15),
487+
'MemorySize': cfg.get('memory_size', 512),
488+
'Publish': True
489+
}
444490

445491
if 'environment_variables' in cfg:
446492
kwargs.update(
@@ -456,7 +502,7 @@ def create_function(cfg, path_to_zip_file):
456502
client.create_function(**kwargs)
457503

458504

459-
def update_function(cfg, path_to_zip_file):
505+
def update_function(cfg, path_to_zip_file, *use_s3, **s3_file):
460506
"""Updates the code of an existing Lambda function"""
461507

462508
print('Updating your Lambda function')
@@ -475,12 +521,25 @@ def update_function(cfg, path_to_zip_file):
475521
cfg.get('region'),
476522
)
477523

478-
client.update_function_code(
479-
FunctionName=cfg.get('function_name'),
480-
ZipFile=byte_stream,
481-
Publish=False,
524+
# Do we prefer development variable over config?
525+
buck_name = (
526+
os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name')
482527
)
483528

529+
if use_s3 == True:
530+
client.update_function_code(
531+
FunctionName=cfg.get('function_name'),
532+
S3Bucket='{}'.format(buck_name),
533+
S3Key='{}'.format(s3_file),
534+
Publish=True
535+
)
536+
else:
537+
client.update_function_code(
538+
FunctionName=cfg.get('function_name'),
539+
ZipFile=byte_stream,
540+
Publish=True
541+
)
542+
484543
kwargs = {
485544
'FunctionName': cfg.get('function_name'),
486545
'Role': role,
@@ -507,13 +566,7 @@ def update_function(cfg, path_to_zip_file):
507566

508567
client.update_function_configuration(**kwargs)
509568

510-
# Publish last, so versions pick up eventually updated description...
511-
client.publish_version(
512-
FunctionName=cfg.get('function_name'),
513-
)
514-
515-
516-
def upload_s3(cfg, path_to_zip_file):
569+
def upload_s3(cfg, path_to_zip_file, *use_s3):
517570
"""Upload a function to AWS S3."""
518571

519572
print('Uploading your new Lambda function')
@@ -548,6 +601,8 @@ def upload_s3(cfg, path_to_zip_file):
548601

549602
client.put_object(**kwargs)
550603
print('Finished uploading {} to S3 bucket {}'.format(func_name, buck_name))
604+
if use_s3 == True:
605+
return filename
551606

552607

553608
def function_exists(cfg, function_name):

scripts/lambda

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -78,12 +78,14 @@ def deploy(use_requirements, local_package):
7878
def upload(use_requirements, local_package):
7979
aws_lambda.upload(CURRENT_DIR, use_requirements, local_package)
8080

81-
82-
@click.command(help='Delete old versions of your functions')
83-
@click.option(
84-
'--keep-last', type=int,
85-
prompt='Please enter the number of recent versions to keep',
86-
)
81+
@click.command(help="Deploy your lambda via S3.")
82+
@click.option('--use-requirements', default=False, is_flag=True, help='Install all packages defined in requirements.txt')
83+
@click.option('--local-package', default=None, help='Install local package as well.', type=click.Path(), multiple=True)
84+
def deploy_s3(use_requirements, local_package):
85+
aws_lambda.deploy_s3(CURRENT_DIR, use_requirements, local_package)
86+
87+
@click.command(help="Delete old versions of your functions")
88+
@click.option("--keep-last", type=int, prompt="Please enter the number of recent versions to keep")
8789
def cleanup(keep_last):
8890
aws_lambda.cleanup_old_versions(CURRENT_DIR, keep_last)
8991

@@ -93,6 +95,7 @@ if __name__ == '__main__':
9395
cli.add_command(invoke)
9496
cli.add_command(deploy)
9597
cli.add_command(upload)
98+
cli.add_command(deploy_s3)
9699
cli.add_command(build)
97100
cli.add_command(cleanup)
98101
cli()

0 commit comments

Comments
 (0)