localstack/config.py (5 changes: 3 additions & 2 deletions)

@@ -192,9 +192,10 @@ def in_docker():
     if LOCALSTACK_HOSTNAME == HOSTNAME:
         DOCKER_HOST_FROM_CONTAINER = 'host.docker.internal'
     # update LOCALSTACK_HOSTNAME if host.docker.internal is available
-    if is_in_docker and LOCALSTACK_HOSTNAME == DOCKER_BRIDGE_IP:
+    if is_in_docker:
         DOCKER_HOST_FROM_CONTAINER = socket.gethostbyname('host.docker.internal')
-        LOCALSTACK_HOSTNAME = DOCKER_HOST_FROM_CONTAINER
+        if LOCALSTACK_HOSTNAME == DOCKER_BRIDGE_IP:
+            LOCALSTACK_HOSTNAME = DOCKER_HOST_FROM_CONTAINER
 except socket.error:
     pass
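A minimal sketch of the resulting behavior (the constant values here are illustrative assumptions, not the real config defaults): host.docker.internal is now resolved whenever LocalStack runs in Docker, but LOCALSTACK_HOSTNAME is only overwritten when it still points at the Docker bridge IP, so a user-supplied hostname survives.

    import socket

    is_in_docker = True
    DOCKER_BRIDGE_IP = '172.17.0.1'
    LOCALSTACK_HOSTNAME = 'my-custom-host'  # user-supplied, not the bridge IP

    try:
        if is_in_docker:
            # always resolve the Docker host address...
            DOCKER_HOST_FROM_CONTAINER = socket.gethostbyname('host.docker.internal')
            # ...but only override a hostname that is still the default bridge IP
            if LOCALSTACK_HOSTNAME == DOCKER_BRIDGE_IP:
                LOCALSTACK_HOSTNAME = DOCKER_HOST_FROM_CONTAINER
    except socket.error:
        pass  # host.docker.internal not resolvable (e.g. on plain Linux)

    print(LOCALSTACK_HOSTNAME)  # still 'my-custom-host'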
localstack/services/es/es_api.py (5 changes: 3 additions & 2 deletions)

@@ -2,6 +2,7 @@
 import time
 from random import randint
 from flask import Flask, jsonify, request, make_response
+from localstack.utils import persistence
 from localstack.services import generic_proxy
 from localstack.utils.aws import aws_stack
 from localstack.constants import TEST_AWS_ACCOUNT_ID

@@ -161,7 +162,7 @@ def get_domain_status(domain_name, deleted=False):
 def start_elasticsearch_instance():
     # Note: keep imports here to avoid circular dependencies
     from localstack.services.es import es_starter
-    from localstack.services.infra import check_infra, restore_persisted_data, Plugin
+    from localstack.services.infra import check_infra, Plugin

     api_name = 'elasticsearch'
     plugin = Plugin(api_name, start=es_starter.start_elasticsearch, check=es_starter.check_elasticsearch)

@@ -172,7 +173,7 @@ def start_elasticsearch_instance():
     # ensure that all infra components are up and running
     check_infra(apis=apis, additional_checks=[es_starter.check_elasticsearch])
     # restore persisted data
-    restore_persisted_data(apis=apis)
+    persistence.restore_persisted_data(apis=apis)
     return t1
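As the comment in the hunk above notes, the import stays inside the function to avoid circular dependencies. A generic sketch of that deferred-import pattern, using hypothetical modules rather than LocalStack's real layout:

    # services.py (hypothetical)
    def start_service():
        # A module-level "from infra import check_infra" would recreate the
        # cycle, since infra imports this module; importing at call time,
        # after both modules are fully loaded, breaks it.
        from infra import check_infra
        check_infra()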
localstack/services/generic_proxy.py (2 changes: 1 addition & 1 deletion)

@@ -289,7 +289,7 @@ def is_full_url(url):
         kwargs = {
             'method': method,
             'path': path,
-            'data': data,
+            'data': self.data_bytes,
             'headers': forward_headers,
             'response': response
         }
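Presumably self.data_bytes holds the request body as originally read, while data may have been decoded or rewritten by intermediate handling; forwarding the raw bytes avoids corrupting binary payloads. A small sketch of the idea with hypothetical names, not the actual proxy class:

    def build_forward_kwargs(method, path, raw_body, headers, response):
        # Hypothetical: pass the raw bytes through so binary payloads
        # (e.g. S3 object content) survive without decode/encode cycles.
        return {
            'method': method,
            'path': path,
            'data': raw_body,  # bytes, untouched
            'headers': headers,
            'response': response,
        }

    kwargs = build_forward_kwargs('PUT', '/bucket/key', b'\x00\x01binary', {}, None)
    print(len(kwargs['data']))  # 8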
localstack/services/infra.py (7 changes: 1 addition & 6 deletions)

@@ -270,11 +270,6 @@ def get_service_status(service, port=None):
     return status


-def restore_persisted_data(apis):
-    for api in apis:
-        persistence.restore_persisted_data(api)
-
-
 def register_signal_handlers():
     global SIGNAL_HANDLERS_SETUP
     if SIGNAL_HANDLERS_SETUP:

@@ -464,7 +459,7 @@ def start_infra(asynchronous=False, apis=None):
     # ensure that all infra components are up and running
     check_infra(apis=apis)
     # restore persisted data
-    restore_persisted_data(apis=apis)
+    persistence.restore_persisted_data(apis=apis)
     print('Ready.')
     sys.stdout.flush()
     if not asynchronous and thread:
localstack/services/s3/s3_listener.py (6 changes: 3 additions & 3 deletions)

@@ -802,9 +802,6 @@ def forward_request(self, method, path, data, headers):
         if method == 'PUT' and not headers.get('content-type'):
             headers['content-type'] = 'binary/octet-stream'

-        # persist this API call to disk
-        persistence.record('s3', method, path, data, headers)
-
         # parse query params
         query = parsed_path.query
         path = parsed_path.path

@@ -893,6 +890,9 @@ def return_response(self, method, path, data, headers, response):
         method = to_str(method)
         bucket_name = get_bucket_name(path, headers)

+        # persist this API call to disk
+        persistence.record('s3', method, path, data, headers, response)
+
         # No path-name based bucket name? Try host-based
         hostname_parts = headers['host'].split('.')
         if (not bucket_name or len(bucket_name) == 0) and len(hostname_parts) > 1:
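Moving the persistence.record call from forward_request to return_response means a call is only written to disk after the backend has answered, with the response in hand for the new 'rd' field. A hedged sketch of the two hooks (simplified; record_call is a hypothetical stand-in for persistence.record):

    def record_call(method, path, data, response):
        # hypothetical stand-in for persistence.record('s3', ..., response)
        print(method, path, getattr(response, 'status_code', None))

    class DemoListener(object):
        def forward_request(self, method, path, data, headers):
            # before the backend call: no response exists yet, so recording
            # here could persist requests that ultimately fail
            return True

        def return_response(self, method, path, data, headers, response):
            # after the backend call: request and response both available
            record_call(method, path, data, response)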
localstack/utils/bootstrap.py (2 changes: 1 addition & 1 deletion)

@@ -425,7 +425,7 @@ def stop(self, quiet=False):
 def run(cmd, print_error=True, asynchronous=False, stdin=False,
         stderr=subprocess.STDOUT, outfile=None, env_vars=None, inherit_cwd=False,
         inherit_env=True, tty=False):
-    # don't use subprocess module inn Python 2 as it is not thread-safe
+    # don't use subprocess module in Python 2 as it is not thread-safe
     # http://stackoverflow.com/questions/21194380/is-subprocess-popen-not-thread-safe
     if six.PY2:
         import subprocess32 as subprocess
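For context on the comment being fixed: on Python 2 the stdlib subprocess module has known thread-safety issues, so the subprocess32 backport is swapped in under the same name. The pattern in isolation:

    import six

    if six.PY2:
        # backport of Python 3.2's subprocess; its Popen avoids the
        # fork-time race conditions of the Python 2 stdlib version
        import subprocess32 as subprocess
    else:
        import subprocess

    print(subprocess.check_output(['echo', 'ok']))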
localstack/utils/persistence.py (62 changes: 42 additions & 20 deletions)

@@ -8,7 +8,12 @@
 from localstack.utils.aws import aws_stack
 from localstack.utils.common import to_bytes, to_str

-API_FILE_PATTERN = '{data_dir}/{api}_api_calls.json'
+USE_SINGLE_DUMP_FILE = True
+
+if USE_SINGLE_DUMP_FILE:
+    API_FILE_PATTERN = '{data_dir}/recorded_api_calls.json'
+else:
+    API_FILE_PATTERN = '{data_dir}/{api}_api_calls.json'

 # Stack with flags to indicate whether we are currently re-playing API calls.
 # (We should not be re-playing and recording at the same time)
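With USE_SINGLE_DUMP_FILE enabled, every service appends to one shared file; the per-API pattern survives in the else branch. The resulting paths, using an example data_dir:

    data_dir, api = '/tmp/localstack/data', 's3'

    single = '{data_dir}/recorded_api_calls.json'.format(data_dir=data_dir)
    per_api = '{data_dir}/{api}_api_calls.json'.format(data_dir=data_dir, api=api)

    print(single)   # /tmp/localstack/data/recorded_api_calls.json
    print(per_api)  # /tmp/localstack/data/s3_api_calls.json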
@@ -18,53 +23,66 @@
 API_FILE_PATHS = {}

 # set up logger
-LOGGER = logging.getLogger(__name__)
+LOG = logging.getLogger(__name__)


-def should_record(api, method, path, data, headers):
+def should_record(api, method, path, data, headers, response=None):
     """ Decide whether or not a given API call should be recorded (persisted to disk) """
     if api == 's3':
         return method in ['PUT', 'POST', 'DELETE']
     return False


-def record(api, method, path, data, headers):
+def record(api, method, path, data, headers, response=None):
     """ Record a given API call to a persistent file on disk """
     file_path = get_file_path(api)
-    if CURRENTLY_REPLAYING or not file_path or not should_record(api, method, path, data, headers):
+    should_be_recorded = should_record(api, method, path, data, headers, response=response)
+    if CURRENTLY_REPLAYING or not file_path or not should_be_recorded:
         return
     entry = None
     try:
         if isinstance(data, dict):
             data = json.dumps(data)
-        if data or data in [u'', b'']:
-            try:
-                data = to_bytes(data)
-            except Exception as e:
-                LOGGER.warning('Unable to call to_bytes: %s' % e)
-            data = to_str(base64.b64encode(data))
+
+        def get_recordable_data(data):
+            if data or data in [u'', b'']:
+                try:
+                    data = to_bytes(data)
+                except Exception as e:
+                    LOG.warning('Unable to call to_bytes: %s' % e)
+                data = to_str(base64.b64encode(data))
+            return data
+
+        data = get_recordable_data(data)
+        response_data = get_recordable_data('' if response is None else response.content)
+
         entry = {
             'a': api,
             'm': method,
             'p': path,
             'd': data,
-            'h': dict(headers)
+            'h': dict(headers),
+            'rd': response_data
         }
         with open(file_path, 'a') as dumpfile:
             dumpfile.write('%s\n' % json.dumps(entry))
     except Exception as e:
         print('Error recording API call to persistent file: %s %s' % (e, traceback.format_exc()))


+def prepare_replay_data(command):
+    data = command['d']
+    data = data and base64.b64decode(data)
+    return data
+
+
 def replay_command(command):
     function = getattr(requests, command['m'].lower())
-    data = command['d']
-    if data:
-        data = base64.b64decode(data)
+    data = prepare_replay_data(command)
     endpoint = aws_stack.get_local_service_url(command['a'])
     full_url = (endpoint[:-1] if endpoint.endswith('/') else endpoint) + command['p']
-    result = function(full_url, data=data, headers=command['h'], verify=False)
-    return result
+    response = function(full_url, data=data, headers=command['h'], verify=False)
+    return response


 def replay(api):
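Given the entry dict above, each recorded call is serialized as one JSON line appended to the dump file, with request and response bodies base64-encoded. A sketch of one such line, with made-up values:

    import base64
    import json

    entry = {
        'a': 's3',                                 # api name
        'm': 'PUT',                                # HTTP method
        'p': '/demo-bucket/hello.txt',             # request path
        'd': base64.b64encode(b'hello').decode(),  # request body, base64
        'h': {'Host': 'localhost:4572'},           # request headers
        'rd': base64.b64encode(b'').decode(),      # response body (new 'rd' field)
    }
    print(json.dumps(entry))
    # {"a": "s3", "m": "PUT", "p": "/demo-bucket/hello.txt", "d": "aGVsbG8=", ...}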
@@ -83,11 +101,15 @@ def replay(api):
     finally:
         CURRENTLY_REPLAYING.pop(0)
     if count:
-        LOGGER.info('Restored %s API calls from persistent file: %s' % (count, file_path))
+        LOG.info('Restored %s API calls from persistent file: %s' % (count, file_path))


-def restore_persisted_data(api):
-    return replay(api)
+def restore_persisted_data(apis):
+    if USE_SINGLE_DUMP_FILE:
+        return replay('_all_')
+    apis = apis if isinstance(apis, list) else [apis]
+    for api in apis:
+        replay(api)


 # ---------------
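End to end, replaying one recorded line boils down to decoding the body and re-issuing the request, roughly what prepare_replay_data and replay_command do together. A hedged sketch; the endpoint is hard-coded here as an assumption, whereas the real code asks aws_stack.get_local_service_url:

    import base64
    import json

    import requests

    line = '{"a": "s3", "m": "PUT", "p": "/demo-bucket/hello.txt", "d": "aGVsbG8=", "h": {}}'
    command = json.loads(line)

    data = command['d'] and base64.b64decode(command['d'])  # cf. prepare_replay_data
    endpoint = 'http://localhost:4572'  # assumed local S3 endpoint of that era
    full_url = endpoint.rstrip('/') + command['p']
    response = requests.request(command['m'], full_url, data=data, headers=command['h'], verify=False)
    print(response.status_code)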