15 | 15 | import botocore |
16 | 16 | import pip |
17 | 17 | import yaml |
| 18 | +import hashlib |
18 | 19 |
19 | 20 | from .helpers import archive |
20 | 21 | from .helpers import mkdir |
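The added `hashlib` import backs the checksum logic introduced in `upload_s3` below: the built archive is fingerprinted with MD5 so every upload gets a unique S3 key. A minimal sketch of that idea, assuming a hypothetical archive at `dist/function.zip`:

```python
import hashlib

# Fingerprint a built archive the way upload_s3 does further down; the
# path here is a hypothetical stand-in for the real build output.
with open('dist/function.zip', mode='rb') as fh:
    digest = hashlib.new('md5', fh.read()).hexdigest()
print(digest)  # e.g. 'd41d8cd98f00b204e9800998ecf8427e' for an empty file
```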
@@ -94,6 +95,27 @@ def deploy(src, requirements=False, local_package=None): |
94 | 95 | else: |
95 | 96 | create_function(cfg, path_to_zip_file) |
96 | 97 |
| 98 | +def upload(src, requirements=False, local_package=None): |
| 99 | + """Uploads a new function to AWS S3. |
| 100 | +
| 101 | + :param str src: |
| 102 | + The path to your Lambda-ready project (the folder must contain a |
| 103 | + valid config.yaml and a handler module, e.g. service.py). |
| 104 | + :param str local_package: |
| 105 | + The path to a local package which should be included in the upload |
| 106 | + as well (and/or is not available on PyPI). |
| 107 | + """ |
| 108 | + # Load and parse the config file. |
| 109 | + path_to_config_file = os.path.join(src, 'config.yaml') |
| 110 | + cfg = read(path_to_config_file, loader=yaml.load) |
| 111 | + |
| 112 | + # Copy all the pip dependencies required to run your code into a |
| 113 | + # temporary folder, then add the handler file to the root of this |
| 114 | + # directory. Zip the contents of this folder into a single file and |
| 115 | + # write it to the dist directory. |
| 116 | + path_to_zip_file = build(src, requirements, local_package) |
| 117 | + |
| 118 | + upload_s3(cfg, path_to_zip_file) |
97 | 119 |
98 | 120 | def invoke(src, alt_event=None, verbose=False): |
99 | 121 | """Simulates a call to your function. |
@@ -437,6 +459,39 @@ def update_function(cfg, path_to_zip_file): |
437 | 459 |
438 | 460 | client.update_function_configuration(**kwargs) |
439 | 461 |
| 462 | +def upload_s3(cfg, path_to_zip_file): |
| 463 | + """Upload a function to AWS S3.""" |
| 464 | +
| 465 | + print('Uploading your new Lambda function') |
| 466 | + aws_access_key_id = cfg.get('aws_access_key_id') |
| 467 | + aws_secret_access_key = cfg.get('aws_secret_access_key') |
| 468 | + client = get_client('s3', aws_access_key_id, aws_secret_access_key, |
| 469 | + cfg.get('region')) |
| 470 | +
| 471 | + # Read the zip archive and derive a unique S3 key from its MD5 checksum. |
| 472 | + with open(path_to_zip_file, mode='rb') as fh: |
| 473 | + byte_stream = fh.read() |
| 474 | + s3_key_prefix = cfg.get('s3_key_prefix', '/dist') |
| 475 | + checksum = hashlib.new('md5', byte_stream).hexdigest() |
| 476 | + timestamp = str(time.time()) |
| 477 | + filename = '{prefix}{checksum}-{ts}.zip'.format( |
| 478 | + prefix=s3_key_prefix, checksum=checksum, ts=timestamp) |
| 479 | +
| 480 | + # Prefer environment variables over config values when both are set. |
| 481 | + buck_name = ( |
| 482 | + os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name') |
| 483 | + ) |
| 484 | + func_name = ( |
| 485 | + os.environ.get('LAMBDA_FUNCTION_NAME') or cfg.get('function_name') |
| 486 | + ) |
| 487 | + kwargs = { |
| 488 | + 'Bucket': buck_name, |
| 489 | + 'Key': filename, |
| 490 | + 'Body': byte_stream |
| 491 | + } |
| 492 | +
| 493 | + client.put_object(**kwargs) |
| 494 | + print('Finished uploading {} to S3 bucket {}'.format(func_name, buck_name)) |
440 | 495 |
441 | 496 | def function_exists(cfg, function_name): |
442 | 497 | """Check whether a function exists or not""" |