1 change: 1 addition & 0 deletions setup.py
@@ -8,6 +8,7 @@
 tests_require = ['flake8', 'pytest', 'pytest-mock', 'coverage', 'pytest-cov']
 install_requires = [
     'requests>=2.9.1',
+    'pyyaml>=5.1',
     'future>=0.15.2',
     'docopt>=0.6.2',
 ]
4 changes: 2 additions & 2 deletions splitio/api/client.py
@@ -25,8 +25,8 @@ def __init__(self, message):
 class HttpClient(object):
     """HttpClient wrapper."""
 
-    SDK_URL = 'https://split.io/api'
-    EVENTS_URL = 'https://split.io/api'
+    SDK_URL = 'https://sdk.split.io/api'
+    EVENTS_URL = 'https://events.split.io/api'
 
     def __init__(self, timeout=None, sdk_url=None, events_url=None):
         """
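
The corrected defaults split the SDK and events hosts. Because the constructor shown above also takes sdk_url and events_url overrides, a caller can still point the wrapper elsewhere. A minimal sketch, assuming direct construction of this internal HttpClient is acceptable and using a hypothetical proxy URL:

    from splitio.api.client import HttpClient

    # Defaults now resolve to sdk.split.io / events.split.io.
    default_client = HttpClient()

    # Hypothetical on-premise proxy; both keyword overrides come from the
    # signature above and are optional.
    proxied_client = HttpClient(
        sdk_url='https://split-proxy.internal.example/api',
        events_url='https://split-proxy.internal.example/api',
    )
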
6 changes: 3 additions & 3 deletions splitio/api/impressions.py
@@ -43,7 +43,7 @@ def _build_bulk(impressions):
         """
         return [
             {
-                'testName': group[0],
+                'testName': test_name,
                 'keyImpressions': [
                     {
                         'keyName': impression.matching_key,
@@ -53,10 +53,10 @@ def _build_bulk(impressions):
                         'label': impression.label,
                         'bucketingKey': impression.bucketing_key
                     }
-                    for impression in group[1]
+                    for impression in imps
                 ]
             }
-            for group in groupby(
+            for (test_name, imps) in groupby(
                 sorted(impressions, key=lambda i: i.feature_name),
                 lambda i: i.feature_name
             )
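
Replacing group[0]/group[1] with (test_name, imps) unpacking makes the groupby contract explicit: each group is a (key, iterator) pair, and the input must already be sorted on the grouping key or groups fragment. A self-contained sketch of the same pattern; the Impression stand-in is hypothetical and carries only the fields the grouping touches:

    from collections import namedtuple
    from itertools import groupby

    # Minimal stand-in for the SDK's impression model.
    Impression = namedtuple('Impression', ['matching_key', 'feature_name', 'treatment'])

    impressions = [
        Impression('user-1', 'new_checkout', 'on'),
        Impression('user-2', 'beta_banner', 'off'),
        Impression('user-3', 'new_checkout', 'off'),
    ]

    # groupby only merges adjacent items, so sort by the grouping key first.
    bulk = [
        {
            'testName': test_name,
            'keyImpressions': [
                {'keyName': imp.matching_key, 'treatment': imp.treatment}
                for imp in imps
            ],
        }
        for (test_name, imps) in groupby(
            sorted(impressions, key=lambda i: i.feature_name),
            lambda i: i.feature_name,
        )
    ]
    # bulk now holds one entry for 'beta_banner' and one for 'new_checkout'.
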
33 changes: 31 additions & 2 deletions splitio/client/client.py
@@ -86,6 +86,21 @@ def _send_impression_to_listener(self, impression, attributes):
             self._logger.debug('Error', exc_info=True)
 
     def get_treatment_with_config(self, key, feature, attributes=None):
+        """
+        Get the treatment and config for a feature and key, with an optional dictionary of attributes.
+
+        This method never raises an exception. If there's a problem, the appropriate log message
+        will be generated and the method will return the CONTROL treatment.
+
+        :param key: The key for which to get the treatment
+        :type key: str
+        :param feature: The name of the feature for which to get the treatment
+        :type feature: str
+        :param attributes: An optional dictionary of attributes
+        :type attributes: dict
+        :return: The treatment and config for the key and feature
+        :rtype: tuple(str, str)
+        """
         try:
             if self.destroyed:
                 self._logger.error("Client has already been destroyed - no calls possible")
@@ -162,7 +177,7 @@ def get_treatment(self, key, feature, attributes=None):
 
     def get_treatments_with_config(self, key, features, attributes=None):
         """
-        Evaluate multiple features and return a dictionary with all the feature/treatments.
+        Evaluate multiple features and return a dict with feature -> (treatment, config).
 
         Get the treatments for a list of features considering a key, with an optional dictionary of
         attributes. This method never raises an exception. If there's a problem, the appropriate
@@ -240,7 +255,21 @@ def get_treatments_with_config(self, key, features, attributes=None):
 
 
     def get_treatments(self, key, features, attributes=None):
-        """TODO"""
+        """
+        Evaluate multiple features and return a dictionary with all the feature/treatments.
+
+        Get the treatments for a list of features considering a key, with an optional dictionary of
+        attributes. This method never raises an exception. If there's a problem, the appropriate
+        log message will be generated and the method will return the CONTROL treatment.
+        :param key: The key for which to get the treatment
+        :type key: str
+        :param features: Array of the names of the features for which to get the treatment
+        :type features: list
+        :param attributes: An optional dictionary of attributes
+        :type attributes: dict
+        :return: Dictionary with the result of all the features provided
+        :rtype: dict
+        """
         with_config = self.get_treatments_with_config(key, features, attributes)
         return {feature: result[0] for (feature, result) in six.iteritems(with_config)}
 
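
Since get_treatments now delegates to get_treatments_with_config and projects out the treatment, the two methods cannot drift apart. A hedged usage sketch: the feature names are invented, the factory setup uses the SDK's public get_factory entry point, and the readiness call is an assumption that may vary by version:

    from splitio import get_factory

    factory = get_factory('YOUR_SDK_API_KEY')
    factory.block_until_ready(5)  # assumption: readiness API as in recent SDK versions
    client = factory.client()

    features = ['new_checkout', 'beta_banner']

    # Full form: feature -> (treatment, config).
    with_config = client.get_treatments_with_config('user-1', features)

    # Short form: feature -> treatment only, derived from the call above.
    treatments = client.get_treatments('user-1', features)
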
1 change: 1 addition & 0 deletions splitio/client/factory.py
@@ -363,6 +363,7 @@ def _build_localhost_factory(config):
     tasks = {'splits': LocalhostSplitSynchronizationTask(
         cfg['splitFile'],
         storages['splits'],
+        cfg['featuresRefreshRate'],
         ready_event
     )}
     tasks['splits'].start()
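
With the refresh period now injected instead of hard-coded, the localhost factory honors the same config key as the polling tasks. A sketch of a localhost-mode config exercising this path; the two keys are the ones read above (cfg['splitFile'], cfg['featuresRefreshRate']), and get_factory('localhost', ...) is assumed to be the SDK's usual localhost entry point:

    from splitio import get_factory

    config = {
        'splitFile': '/etc/splits/file1.split',  # or a .yaml/.yml file
        'featuresRefreshRate': 30,               # seconds between re-reads of the file
    }
    factory = get_factory('localhost', config=config)
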
185 changes: 125 additions & 60 deletions splitio/client/localhost.py
@@ -1,14 +1,19 @@
 """Localhost client mocked components."""
 
+import itertools
 import logging
 import re
-from splitio.models.splits import from_raw
 
+from six import raise_from
+import yaml
+
+from splitio.models import splits
 from splitio.storage import ImpressionStorage, EventStorage, TelemetryStorage
 from splitio.tasks import BaseSynchronizationTask
 from splitio.tasks.util import asynctask
 
-_COMMENT_LINE_RE = re.compile('^#.*$')
-_DEFINITION_LINE_RE = re.compile('^(?<![^#])(?P<feature>[\w_-]+)\s+(?P<treatment>[\w_-]+)$')
+_LEGACY_COMMENT_LINE_RE = re.compile(r'^#.*$')
+_LEGACY_DEFINITION_LINE_RE = re.compile(r'^(?<![^#])(?P<feature>[\w_-]+)\s+(?P<treatment>[\w_-]+)$')
 
 
 _LOGGER = logging.getLogger(__name__)
@@ -69,7 +74,7 @@ def pop_gauges(self, *_, **__): #pylint: disable=arguments-differ
 class LocalhostSplitSynchronizationTask(BaseSynchronizationTask):
     """Split synchronization task that periodically checks the file and updates the splits."""
 
-    def __init__(self, filename, storage, ready_event):
+    def __init__(self, filename, storage, period, ready_event):
         """
         Class constructor.
 
@@ -83,22 +88,23 @@ def __init__(self, filename, storage, ready_event):
         self._filename = filename
         self._ready_event = ready_event
         self._storage = storage
-        self._task = asynctask.AsyncTask(self._update_splits, 5, self._on_start)
+        self._period = period
+        self._task = asynctask.AsyncTask(self._update_splits, period, self._on_start)
 
     def _on_start(self):
         """Sync splits and set event if successful."""
         self._update_splits()
         self._ready_event.set()
 
     @staticmethod
-    def _make_all_keys_based_split(split_name, treatment):
+    def _make_split(split_name, conditions, configs=None):
         """
         Make a split with a single all_keys matcher.
 
         :param split_name: Name of the split.
         :type split_name: str.
         """
-        return from_raw({
+        return splits.from_raw({
             'changeNumber': 123,
             'trafficTypeName': 'user',
             'name': split_name,
@@ -107,30 +113,55 @@ def _make_all_keys_based_split(split_name, treatment):
             'seed': 321654,
             'status': 'ACTIVE',
             'killed': False,
-            'defaultTreatment': treatment,
+            'defaultTreatment': 'control',
             'algo': 2,
-            'conditions': [
-                {
-                    'partitions': [
-                        {'treatment': treatment, 'size': 100}
-                    ],
-                    'contitionType': 'WHITELIST',
-                    'label': 'some_other_label',
-                    'matcherGroup': {
-                        'matchers': [
-                            {
-                                'matcherType': 'ALL_KEYS',
-                                'negate': False,
-                            }
-                        ],
-                        'combiner': 'AND'
-                    }
-                }
-            ]
+            'conditions': conditions,
+            'configurations': configs
         })
 
+    @staticmethod
+    def _make_all_keys_condition(treatment):
+        return {
+            'partitions': [
+                {'treatment': treatment, 'size': 100}
+            ],
+            'conditionType': 'WHITELIST',
+            'label': 'some_other_label',
+            'matcherGroup': {
+                'matchers': [
+                    {
+                        'matcherType': 'ALL_KEYS',
+                        'negate': False,
+                    }
+                ],
+                'combiner': 'AND'
+            }
+        }
+
+    @staticmethod
+    def _make_whitelist_condition(whitelist, treatment):
+        return {
+            'partitions': [
+                {'treatment': treatment, 'size': 100}
+            ],
+            'conditionType': 'WHITELIST',
+            'label': 'some_other_label',
+            'matcherGroup': {
+                'matchers': [
+                    {
+                        'matcherType': 'WHITELIST',
+                        'negate': False,
+                        'whitelistMatcherData': {
+                            'whitelist': whitelist
+                        }
+                    }
+                ],
+                'combiner': 'AND'
+            }
+        }
+
     @classmethod
-    def _read_splits_from_file(cls, filename):
+    def _read_splits_from_legacy_file(cls, filename):
         """
         Parse a splits file and return a populated storage.
 
@@ -140,53 +171,89 @@ def _read_splits_from_file(cls, filename):
         :return: Storage populated with splits ready to be evaluated.
         :rtype: InMemorySplitStorage
         """
-        splits = {}
+        to_return = {}
         try:
             with open(filename, 'r') as flo:
                 for line in flo:
-                    if line.strip() == '':
-                        continue
-
-                    comment_match = _COMMENT_LINE_RE.match(line)
-                    if comment_match:
+                    if line.strip() == '' or _LEGACY_COMMENT_LINE_RE.match(line):
                         continue
 
-                    definition_match = _DEFINITION_LINE_RE.match(line)
-                    if definition_match:
-                        splits[definition_match.group('feature')] = cls._make_all_keys_based_split(
-                            definition_match.group('feature'),
-                            definition_match.group('treatment')
+                    definition_match = _LEGACY_DEFINITION_LINE_RE.match(line)
+                    if not definition_match:
+                        _LOGGER.warning(
+                            'Invalid line on localhost environment split '
+                            'definition. Line = %s',
+                            line
                         )
+                        continue
 
-                    _LOGGER.warning(
-                        'Invalid line on localhost environment split '
-                        'definition. Line = %s',
-                        line
-                    )
-            return splits
-        except IOError as e:
-            raise ValueError("Error parsing split file")
-            # TODO: look into raise_from!
-            # raise_from(ValueError(
-            #     'There was a problem with '
-            #     'the splits definition file "{}"'.format(filename)),
-            #     e
-            # )
+                    cond = cls._make_all_keys_condition(definition_match.group('treatment'))
+                    splt = cls._make_split(definition_match.group('feature'), [cond])
+                    to_return[splt.name] = splt
+            return to_return
+
+        except IOError as exc:
+            raise_from(
+                ValueError("Error parsing file %s. Make sure it's readable." % filename),
+                exc
+            )
+
+    @classmethod
+    def _read_splits_from_yaml_file(cls, filename):
+        """
+        Parse a splits file and return a populated storage.
+
+        :param filename: Path of the file containing mocked splits & treatments.
+        :type filename: str.
+
+        :return: Storage populated with splits ready to be evaluated.
+        :rtype: InMemorySplitStorage
+        """
+        try:
+            with open(filename, 'r') as flo:
+                parsed = yaml.load(flo.read(), Loader=yaml.FullLoader)
+
+            grouped_by_feature_name = itertools.groupby(
+                sorted(parsed, key=lambda i: next(iter(i.keys()))),
+                lambda i: next(iter(i.keys())))
+
+            to_return = {}
+            for (split_name, statements) in grouped_by_feature_name:
+                configs = {}
+                whitelist = []
+                all_keys = []
+                for statement in statements:
+                    data = next(iter(statement.values()))  # grab the first (and only) value.
+                    if 'keys' in data:
+                        keys = data['keys'] if isinstance(data['keys'], list) else [data['keys']]
+                        whitelist.append(cls._make_whitelist_condition(keys, data['treatment']))
+                    else:
+                        all_keys.append(cls._make_all_keys_condition(data['treatment']))
+                    if 'config' in data:
+                        configs[data['treatment']] = data['config']
+                to_return[split_name] = cls._make_split(split_name, whitelist + all_keys, configs)
+            return to_return
+
+        except IOError as exc:
+            raise_from(
+                ValueError("Error parsing file %s. Make sure it's readable." % filename),
+                exc
+            )
+
     def _update_splits(self):
         """Update splits in storage."""
         _LOGGER.info('Synchronizing splits now.')
-        splits = self._read_splits_from_file(self._filename)
-        to_delete = [name for name in self._storage.get_split_names() if name not in splits.keys()]
-        for split in splits.values():
+        if self._filename.lower().endswith(('.yaml', '.yml')):
+            fetched = self._read_splits_from_yaml_file(self._filename)
+        else:
+            fetched = self._read_splits_from_legacy_file(self._filename)
+        to_delete = [name for name in self._storage.get_split_names() if name not in fetched.keys()]
+        for split in fetched.values():
            self._storage.put(split)
 
         for split in to_delete:
             self._storage.remove(split)
 
     def is_running(self):
         """Return whether the task is running."""
         return self._task.running
@@ -195,13 +262,11 @@ def start(self):
         """Start split synchronization."""
         self._task.start()
 
-    def stop(self, stop_event):
+    def stop(self, event=None):
         """
         Stop task.
 
-        :param stop_event: Event top set when the task finishes.
-        :type stop_event: threading.Event.
         """
-        self._task.stop(stop_event)
+        self._task.stop(event)
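
The YAML reader above expects a list of single-entry mappings, one statement per (split, treatment) pair: treatment is required, keys (scalar or list) adds a whitelist condition, and config attaches a per-treatment configuration string. A sketch of a file it would parse, with invented names and values:

    - new_checkout:
        treatment: "on"
        keys: ["user-1", "user-2"]
        config: "{\"color\": \"blue\"}"
    - new_checkout:
        treatment: "off"
    - beta_banner:
        treatment: "on"

Statements for the same split are grouped after sorting by name, whitelist conditions are placed ahead of the all-keys fallback, and each config lands in the split's configurations map keyed by treatment.
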
14 changes: 14 additions & 0 deletions tests/client/files/file1.split
@@ -0,0 +1,14 @@
+events_write_es on
+events_routing sqs
+impressions_routing sqs
+workspaces_v1 on
+create_org_with_workspace on
+sqs_events_processing on
+sqs_impressions_processing on
+sqs_events_fetch on
+sqs_impressions_fetch off
+sqs_impressions_fetch_period 700
+sqs_impressions_fetch_threads 10
+sqs_events_fetch_period 500
+sqs_events_fetch_threads 5
+
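
Each line of this legacy fixture is just "feature_name treatment", which the legacy reader turns into an ALL_KEYS split. A quick sanity check of the line format, duplicating the definition regex from splitio/client/localhost.py rather than importing the module:

    import re

    # Same pattern as _LEGACY_DEFINITION_LINE_RE above.
    pattern = re.compile(r'^(?<![^#])(?P<feature>[\w_-]+)\s+(?P<treatment>[\w_-]+)$')

    match = pattern.match('sqs_impressions_fetch off')
    assert match.group('feature') == 'sqs_impressions_fetch'
    assert match.group('treatment') == 'off'

    # Comment lines never match and are skipped by the reader.
    assert pattern.match('# just a comment') is None
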