22 changes: 14 additions & 8 deletions README.rst
@@ -167,14 +167,20 @@ This would create environment variables in the lambda instance upon deploy. If y

Uploading to S3
===============
You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the `upload` command to send the bundle to an S3 bucket of your choosing.
Before doing this, you will need to set the following variables in `config.yaml`:
```
role: basic_s3_upload
bucket_name: 'example-bucket'
s3_key_prefix: 'path/to/file/'
```
Your role must have `s3:PutObject` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute `lambda upload` to initiate the transfer.
You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the ``upload`` command to send the bundle to an S3 bucket of your choosing.
Before doing this, you will need to set the following variables in ``config.yaml``:

.. code:: yaml

role: basic_s3_upload
bucket_name: 'example-bucket'
s3_key_prefix: 'path/to/file/'

Your role must have ``s3:PutObject`` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute ``lambda upload`` to initiate the transfer.
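
The same upload can also be driven from Python; a minimal sketch, assuming the project (and its ``config.yaml``) lives in the current working directory:

.. code:: python

    import aws_lambda

    # Equivalent of running `lambda upload` from the project root: the
    # bundle is sent to the bucket/key prefix configured in config.yaml.
    aws_lambda.upload('.', requirements=False, local_package=None)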

Deploying via S3
================
You can also choose to use S3 as your source for Lambda deployments. This can be done by issuing ``lambda deploy_s3`` with the same variables/AWS permissions you'd set for executing the ``upload`` command.
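
A programmatic sketch of the same flow, under the same assumption that the project directory is the current working directory:

.. code:: python

    import aws_lambda

    # Equivalent of running `lambda deploy_s3`: the bundle is built,
    # uploaded to S3, and the function is created or updated from there.
    aws_lambda.deploy_s3('.', requirements=False, local_package=None)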

Development
===========
2 changes: 1 addition & 1 deletion aws_lambda/__init__.py
@@ -4,7 +4,7 @@
__email__ = 'nficano@gmail.com'
__version__ = '2.1.1'

from .aws_lambda import deploy, invoke, init, build, upload, cleanup_old_versions
from .aws_lambda import deploy, deploy_s3, invoke, init, build, upload, cleanup_old_versions

# Set default logging handler to avoid "No handler found" warnings.
import logging
103 changes: 79 additions & 24 deletions aws_lambda/aws_lambda.py
@@ -103,6 +103,32 @@ def deploy(src, requirements=False, local_package=None):
else:
create_function(cfg, path_to_zip_file)

def deploy_s3(src, requirements=False, local_package=None):
"""Deploys a new function via AWS S3.

:param str src:
The path to your Lambda ready project (folder must contain a valid
config.yaml and handler module, e.g. service.py).
:param str local_package:
The path to a local package which should be included in the deploy as
well (and/or is not available on PyPI).
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, 'config.yaml')
cfg = read(path_to_config_file, loader=yaml.load)

# Copy all the pip dependencies required to run your code into a temporary
# folder then add the handler file in the root of this directory.
# Zip the contents of this folder into a single file and output to the dist
# directory.
path_to_zip_file = build(src, requirements, local_package)

use_s3 = True
s3_file = upload_s3(cfg, path_to_zip_file, use_s3)
if function_exists(cfg, cfg.get('function_name')):
update_function(cfg, path_to_zip_file, use_s3, s3_file)
else:
create_function(cfg, path_to_zip_file, use_s3, s3_file)

def upload(src, requirements=False, local_package=None):
"""Uploads a new function to AWS S3.
Expand Down Expand Up @@ -406,7 +432,7 @@ def get_client(client, aws_access_key_id, aws_secret_access_key, region=None):
)


def create_function(cfg, path_to_zip_file):
def create_function(cfg, path_to_zip_file, use_s3=False, s3_file=None):
"""Register and upload a function to AWS Lambda."""

print('Creating your new Lambda function')
@@ -426,21 +452,41 @@ def create_function(cfg, path_to_zip_file):
)

# Do we prefer development variable over config?
buck_name = (
os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name')
)
func_name = (
os.environ.get('LAMBDA_FUNCTION_NAME') or cfg.get('function_name')
)
print('Creating lambda function with name: {}'.format(func_name))
kwargs = {
'FunctionName': func_name,
'Runtime': cfg.get('runtime', 'python2.7'),
'Role': role,
'Handler': cfg.get('handler'),
'Code': {'ZipFile': byte_stream},
'Description': cfg.get('description'),
'Timeout': cfg.get('timeout', 15),
'MemorySize': cfg.get('memory_size', 512),
'Publish': True,
}

if use_s3:
kwargs = {
'FunctionName': func_name,
'Runtime': cfg.get('runtime', 'python2.7'),
'Role': role,
'Handler': cfg.get('handler'),
'Code': {
'S3Bucket': buck_name,
'S3Key': s3_file
},
'Description': cfg.get('description'),
'Timeout': cfg.get('timeout', 15),
'MemorySize': cfg.get('memory_size', 512),
'Publish': True
}
else:
kwargs = {
'FunctionName': func_name,
'Runtime': cfg.get('runtime', 'python2.7'),
'Role': role,
'Handler': cfg.get('handler'),
'Code': {'ZipFile': byte_stream},
'Description': cfg.get('description'),
'Timeout': cfg.get('timeout', 15),
'MemorySize': cfg.get('memory_size', 512),
'Publish': True
}

if 'environment_variables' in cfg:
kwargs.update(
@@ -456,7 +502,7 @@ def create_function(cfg, path_to_zip_file):
client.create_function(**kwargs)


def update_function(cfg, path_to_zip_file):
def update_function(cfg, path_to_zip_file, use_s3=False, s3_file=None):
"""Updates the code of an existing Lambda function"""

print('Updating your Lambda function')
@@ -475,12 +521,25 @@ def update_function(cfg, path_to_zip_file):
cfg.get('region'),
)

client.update_function_code(
FunctionName=cfg.get('function_name'),
ZipFile=byte_stream,
Publish=False,
# Do we prefer development variable over config?
buck_name = (
os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name')
)

if use_s3:
client.update_function_code(
FunctionName=cfg.get('function_name'),
S3Bucket=buck_name,
S3Key=s3_file,
Publish=True
)
else:
client.update_function_code(
FunctionName=cfg.get('function_name'),
ZipFile=byte_stream,
Publish=True
)

kwargs = {
'FunctionName': cfg.get('function_name'),
'Role': role,
Expand All @@ -507,13 +566,7 @@ def update_function(cfg, path_to_zip_file):

client.update_function_configuration(**kwargs)

# Publish last, so versions pick up eventually updated description...
client.publish_version(
FunctionName=cfg.get('function_name'),
)


def upload_s3(cfg, path_to_zip_file):
def upload_s3(cfg, path_to_zip_file, use_s3=False):
"""Upload a function to AWS S3."""

print('Uploading your new Lambda function')
@@ -548,6 +601,8 @@ def upload_s3(cfg, path_to_zip_file):

client.put_object(**kwargs)
print('Finished uploading {} to S3 bucket {}'.format(func_name, buck_name))
if use_s3:
return filename


def function_exists(cfg, function_name):
15 changes: 9 additions & 6 deletions scripts/lambda
@@ -78,12 +78,14 @@ def deploy(use_requirements, local_package):
def upload(use_requirements, local_package):
aws_lambda.upload(CURRENT_DIR, use_requirements, local_package)


@click.command(help='Delete old versions of your functions')
@click.option(
'--keep-last', type=int,
prompt='Please enter the number of recent versions to keep',
)
@click.command(help="Deploy your lambda via S3.")
@click.option('--use-requirements', default=False, is_flag=True, help='Install all packages defined in requirements.txt')
@click.option('--local-package', default=None, help='Install local package as well.', type=click.Path(), multiple=True)
def deploy_s3(use_requirements, local_package):
aws_lambda.deploy_s3(CURRENT_DIR, use_requirements, local_package)

@click.command(help="Delete old versions of your functions")
@click.option("--keep-last", type=int, prompt="Please enter the number of recent versions to keep")
def cleanup(keep_last):
aws_lambda.cleanup_old_versions(CURRENT_DIR, keep_last)

@@ -93,6 +95,7 @@ if __name__ == '__main__':
cli.add_command(invoke)
cli.add_command(deploy)
cli.add_command(upload)
cli.add_command(deploy_s3)
cli.add_command(build)
cli.add_command(cleanup)
cli()