Skip to content

Commit 1331d0c

Browse files
committed
Merge branch 'master' of github.com:nficano/python-lambda
* 'master' of github.com:nficano/python-lambda: Allowing update_function to update runtime; markdown formatting; fail typo; Adding function to deploy using S3 as source; fix exception on invoke when no env vars found in config
2 parents 7053c05 + 484fdd0 commit 1331d0c

File tree

4 files changed

+108
-41
lines changed

4 files changed

+108
-41
lines changed

README.rst

Lines changed: 14 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -167,14 +167,20 @@ This would create environment variables in the lambda instance upon deploy. If y
167167

168168
Uploading to S3
169169
===============
170-
You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the `upload` command to send the bundle to an S3 bucket of your choosing.
171-
Before doing this, you will need to set the following variables in `config.yaml`:
172-
```
173-
role: basic_s3_upload
174-
bucket_name: 'example-bucket'
175-
s3_key_prefix: 'path/to/file/'
176-
```
177-
Your role must have `s3:PutObject` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute `lambda upload` to initiate the transfer.
170+
You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the ``upload`` command to send the bundle to an S3 bucket of your choosing.
171+
Before doing this, you will need to set the following variables in ``config.yaml``:
172+
173+
.. code:: yaml
174+
175+
role: basic_s3_upload
176+
bucket_name: 'example-bucket'
177+
s3_key_prefix: 'path/to/file/'
178+
179+
Your role must have ``s3:PutObject`` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute ``lambda upload`` to initiate the transfer.
180+
181+
Deploying via S3
182+
================
183+
You can also choose to use S3 as your source for Lambda deployments. This can be done by issuing ``lambda deploy_s3`` with the same variables/AWS permissions you'd set for executing the ``upload`` command.
178184

179185
Development
180186
===========

aws_lambda/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
__email__ = '[email protected]'
55
__version__ = '2.1.1'
66

7-
from .aws_lambda import deploy, invoke, init, build, upload, cleanup_old_versions
7+
from .aws_lambda import deploy, deploy_s3, invoke, init, build, upload, cleanup_old_versions
88

99
# Set default logging handler to avoid "No handler found" warnings.
1010
import logging

aws_lambda/aws_lambda.py

Lines changed: 84 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,32 @@ def deploy(src, requirements=False, local_package=None):
103103
else:
104104
create_function(cfg, path_to_zip_file)
105105

106+
def deploy_s3(src, requirements=False, local_package=None):
107+
"""Deploys a new function via AWS S3.
108+
109+
:param str src:
110+
The path to your Lambda ready project (folder must contain a valid
111+
config.yaml and handler module (e.g.: service.py).
112+
:param str local_package:
113+
The path to a local package with should be included in the deploy as
114+
well (and/or is not available on PyPi)
115+
"""
116+
# Load and parse the config file.
117+
path_to_config_file = os.path.join(src, 'config.yaml')
118+
cfg = read(path_to_config_file, loader=yaml.load)
119+
120+
# Copy all the pip dependencies required to run your code into a temporary
121+
# folder then add the handler file in the root of this directory.
122+
# Zip the contents of this folder into a single file and output to the dist
123+
# directory.
124+
path_to_zip_file = build(src, requirements, local_package)
125+
126+
use_s3 = True
127+
s3_file = upload_s3(cfg, path_to_zip_file, use_s3)
128+
if function_exists(cfg, cfg.get('function_name')):
129+
update_function(cfg, path_to_zip_file, use_s3, s3_file)
130+
else:
131+
create_function(cfg, path_to_zip_file, use_s3, s3_file)
106132

107133
def upload(src, requirements=False, local_package=None):
108134
"""Uploads a new function to AWS S3.
@@ -144,8 +170,10 @@ def invoke(src, alt_event=None, verbose=False):
144170

145171
# Load environment variables from the config file into the actual
146172
# environment.
147-
for key, value in cfg.get('environment_variables').items():
148-
os.environ[key] = value
173+
env_vars = cfg.get('environment_variables')
174+
if env_vars:
175+
for key, value in env_vars.items():
176+
os.environ[key] = value
149177

150178
# Load and parse event file.
151179
if alt_event:
@@ -406,7 +434,7 @@ def get_client(client, aws_access_key_id, aws_secret_access_key, region=None):
406434
)
407435

408436

409-
def create_function(cfg, path_to_zip_file):
437+
def create_function(cfg, path_to_zip_file, *use_s3, **s3_file):
410438
"""Register and upload a function to AWS Lambda."""
411439

412440
print('Creating your new Lambda function')
@@ -426,21 +454,41 @@ def create_function(cfg, path_to_zip_file):
426454
)
427455

428456
# Do we prefer development variable over config?
457+
buck_name = (
458+
os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name')
459+
)
429460
func_name = (
430461
os.environ.get('LAMBDA_FUNCTION_NAME') or cfg.get('function_name')
431462
)
432463
print('Creating lambda function with name: {}'.format(func_name))
433-
kwargs = {
434-
'FunctionName': func_name,
435-
'Runtime': cfg.get('runtime', 'python2.7'),
436-
'Role': role,
437-
'Handler': cfg.get('handler'),
438-
'Code': {'ZipFile': byte_stream},
439-
'Description': cfg.get('description'),
440-
'Timeout': cfg.get('timeout', 15),
441-
'MemorySize': cfg.get('memory_size', 512),
442-
'Publish': True,
443-
}
464+
465+
if use_s3 == True:
466+
kwargs = {
467+
'FunctionName': func_name,
468+
'Runtime': cfg.get('runtime', 'python2.7'),
469+
'Role': role,
470+
'Handler': cfg.get('handler'),
471+
'Code': {
472+
'S3Bucket': '{}'.format(buck_name),
473+
'S3Key': '{}'.format(s3_file)
474+
},
475+
'Description': cfg.get('description'),
476+
'Timeout': cfg.get('timeout', 15),
477+
'MemorySize': cfg.get('memory_size', 512),
478+
'Publish': True
479+
}
480+
else:
481+
kwargs = {
482+
'FunctionName': func_name,
483+
'Runtime': cfg.get('runtime', 'python2.7'),
484+
'Role': role,
485+
'Handler': cfg.get('handler'),
486+
'Code': {'ZipFile': byte_stream},
487+
'Description': cfg.get('description'),
488+
'Timeout': cfg.get('timeout', 15),
489+
'MemorySize': cfg.get('memory_size', 512),
490+
'Publish': True
491+
}
444492

445493
if 'environment_variables' in cfg:
446494
kwargs.update(
@@ -456,7 +504,7 @@ def create_function(cfg, path_to_zip_file):
456504
client.create_function(**kwargs)
457505

458506

459-
def update_function(cfg, path_to_zip_file):
507+
def update_function(cfg, path_to_zip_file, *use_s3, **s3_file):
460508
"""Updates the code of an existing Lambda function"""
461509

462510
print('Updating your Lambda function')
@@ -475,15 +523,29 @@ def update_function(cfg, path_to_zip_file):
475523
cfg.get('region'),
476524
)
477525

478-
client.update_function_code(
479-
FunctionName=cfg.get('function_name'),
480-
ZipFile=byte_stream,
481-
Publish=False,
526+
# Do we prefer development variable over config?
527+
buck_name = (
528+
os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name')
482529
)
483530

531+
if use_s3 == True:
532+
client.update_function_code(
533+
FunctionName=cfg.get('function_name'),
534+
S3Bucket='{}'.format(buck_name),
535+
S3Key='{}'.format(s3_file),
536+
Publish=True
537+
)
538+
else:
539+
client.update_function_code(
540+
FunctionName=cfg.get('function_name'),
541+
ZipFile=byte_stream,
542+
Publish=True
543+
)
544+
484545
kwargs = {
485546
'FunctionName': cfg.get('function_name'),
486547
'Role': role,
548+
'Runtime': cfg.get('runtime'),
487549
'Handler': cfg.get('handler'),
488550
'Description': cfg.get('description'),
489551
'Timeout': cfg.get('timeout', 15),
@@ -507,13 +569,7 @@ def update_function(cfg, path_to_zip_file):
507569

508570
client.update_function_configuration(**kwargs)
509571

510-
# Publish last, so versions pick up eventually updated description...
511-
client.publish_version(
512-
FunctionName=cfg.get('function_name'),
513-
)
514-
515-
516-
def upload_s3(cfg, path_to_zip_file):
572+
def upload_s3(cfg, path_to_zip_file, *use_s3):
517573
"""Upload a function to AWS S3."""
518574

519575
print('Uploading your new Lambda function')
@@ -548,6 +604,8 @@ def upload_s3(cfg, path_to_zip_file):
548604

549605
client.put_object(**kwargs)
550606
print('Finished uploading {} to S3 bucket {}'.format(func_name, buck_name))
607+
if use_s3 == True:
608+
return filename
551609

552610

553611
def function_exists(cfg, function_name):

scripts/lambda

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -78,12 +78,14 @@ def deploy(use_requirements, local_package):
7878
def upload(use_requirements, local_package):
7979
aws_lambda.upload(CURRENT_DIR, use_requirements, local_package)
8080

81-
82-
@click.command(help='Delete old versions of your functions')
83-
@click.option(
84-
'--keep-last', type=int,
85-
prompt='Please enter the number of recent versions to keep',
86-
)
81+
@click.command(help="Deploy your lambda via S3.")
82+
@click.option('--use-requirements', default=False, is_flag=True, help='Install all packages defined in requirements.txt')
83+
@click.option('--local-package', default=None, help='Install local package as well.', type=click.Path(), multiple=True)
84+
def deploy_s3(use_requirements, local_package):
85+
aws_lambda.deploy_s3(CURRENT_DIR, use_requirements, local_package)
86+
87+
@click.command(help="Delete old versions of your functions")
88+
@click.option("--keep-last", type=int, prompt="Please enter the number of recent versions to keep")
8789
def cleanup(keep_last):
8890
aws_lambda.cleanup_old_versions(CURRENT_DIR, keep_last)
8991

@@ -93,6 +95,7 @@ if __name__ == '__main__':
9395
cli.add_command(invoke)
9496
cli.add_command(deploy)
9597
cli.add_command(upload)
98+
cli.add_command(deploy_s3)
9699
cli.add_command(build)
97100
cli.add_command(cleanup)
98101
cli()

0 commit comments

Comments
 (0)