Skip to content

Commit b53c901

Browse files
authored
Merge pull request #111 from splitio/bug/TrafficAllocation
[SDKS-352]: Traffic Allocation 1 bucket off fix
2 parents 2b4aac3 + ad25170 commit b53c901

File tree

3 files changed

+85
-64
lines changed

3 files changed

+85
-64
lines changed

CHANGES.txt

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
1-
6.2.1 (Nov 26, 2018)
1+
6.2.1 (Dec 6, 2018)
2+
- Fixed traffic allocation issue on 1%.
23
- Allowed hyphens in 'localhost mode' file values.
3-
6.2.0 (Oct 5, 2018)
4+
6.2.0 (Oct 5, 2018)
45
- Added get_treatments method.
56
6.1.0 (Sep 25, 2018)
67
- Add custom impression listener feature.

splitio/clients.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -245,7 +245,7 @@ def _get_treatment_for_split(self, split, matching_key, bucketing_key, attribute
245245
split.traffic_allocation_seed,
246246
split.algo
247247
)
248-
if bucket >= split.traffic_allocation:
248+
if bucket > split.traffic_allocation:
249249
return split.default_treatment, Label.NOT_IN_SPLIT
250250
roll_out = True
251251

splitio/tests/test_splits.py

Lines changed: 81 additions & 61 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
from splitio.uwsgi import get_uwsgi, UWSGISegmentCache, UWSGISplitParser
2424
from splitio.clients import Client
2525
from splitio.brokers import RedisBroker
26+
from splitio.splitters import Splitter
2627

2728

2829
class InMemorySplitFetcherTests(TestCase):
@@ -345,7 +346,7 @@ def test_fetch_doesnt_raise_an_exception_fetch_from_backend_raises_one(self):
345346

346347
try:
347348
self.fetcher.fetch(self.some_since)
348-
except:
349+
except Exception:
349350
self.fail('Unexpected exception raised')
350351

351352
def test_fetch_returns_empty_response_if_fetch_from_backend_raises_an_exception(self):
@@ -718,8 +719,9 @@ def test_parse_matcher_greater_than_or_equal_to_calls_equal_to_matcher_for_data_
718719
Tests that _parse_matcher_greater_than_or_equal_to calls
719720
GreaterThanOrEqualToMatcher.for_data_type
720721
"""
721-
self.parser._parse_matcher_greater_than_or_equal_to(self.some_partial_split,
722-
self.some_greater_than_or_equal_to_matcher)
722+
self.parser \
723+
._parse_matcher_greater_than_or_equal_to(self.some_partial_split,
724+
self.some_greater_than_or_equal_to_matcher)
723725
self.greater_than_or_equal_to_matcher_mock.for_data_type.assert_called_once_with(
724726
self.get_matcher_data_data_type_mock.return_value,
725727
self.some_greater_than_or_equal_to_matcher['unaryNumericMatcherData']['value'])
@@ -731,15 +733,17 @@ def test_parse_matcher_greater_than_or_equal_to_returns_equal_to_matcher(self):
731733
"""
732734
self.assertEqual(
733735
self.greater_than_or_equal_to_matcher_mock.for_data_type.return_value,
734-
self.parser._parse_matcher_greater_than_or_equal_to(self.some_partial_split,
736+
self.parser._parse_matcher_greater_than_or_equal_to(
737+
self.some_partial_split,
735738
self.some_greater_than_or_equal_to_matcher))
736739

737740
def test_parse_matcher_less_than_or_equal_to_calls_equal_to_matcher_for_data_type(self):
738741
"""
739742
Tests that _parse_matcher_less_than_or_equal_to calls
740743
LessThanOrEqualToMatcher.for_data_type
741744
"""
742-
self.parser._parse_matcher_less_than_or_equal_to(self.some_partial_split,
745+
self.parser._parse_matcher_less_than_or_equal_to(
746+
self.some_partial_split,
743747
self.some_less_than_or_equal_to_matcher)
744748
self.less_than_or_equal_to_matcher_mock.for_data_type.assert_called_once_with(
745749
self.get_matcher_data_data_type_mock.return_value,
@@ -752,7 +756,8 @@ def test_parse_matcher_less_than_or_equal_to_returns_equal_to_matcher(self):
752756
"""
753757
self.assertEqual(
754758
self.less_than_or_equal_to_matcher_mock.for_data_type.return_value,
755-
self.parser._parse_matcher_less_than_or_equal_to(self.some_partial_split,
759+
self.parser._parse_matcher_less_than_or_equal_to(
760+
self.some_partial_split,
756761
self.some_less_than_or_equal_to_matcher))
757762

758763
def test_parse_matcher_between_calls_between_matcher_for_data_type(self):
@@ -927,31 +932,33 @@ def setUp(self):
927932
fn = join(dirname(__file__), 'algoSplits.json')
928933
with open(fn, 'r') as flo:
929934
rawData = json.load(flo)['splits']
930-
self._testData = [{
931-
'body': rawData[0],
932-
'algo': HashAlgorithm.LEGACY,
933-
'hashfn': legacy_hash
934-
},
935-
{
936-
'body': rawData[1],
937-
'algo': HashAlgorithm.MURMUR,
938-
'hashfn': _murmur_hash
939-
},
940-
{
941-
'body': rawData[2],
942-
'algo': HashAlgorithm.LEGACY,
943-
'hashfn': legacy_hash
944-
},
945-
{
946-
'body': rawData[3],
947-
'algo': HashAlgorithm.LEGACY,
948-
'hashfn': legacy_hash
949-
},
950-
{
951-
'body': rawData[4],
952-
'algo': HashAlgorithm.LEGACY,
953-
'hashfn': legacy_hash
954-
}]
935+
self._testData = [
936+
{
937+
'body': rawData[0],
938+
'algo': HashAlgorithm.LEGACY,
939+
'hashfn': legacy_hash
940+
},
941+
{
942+
'body': rawData[1],
943+
'algo': HashAlgorithm.MURMUR,
944+
'hashfn': _murmur_hash
945+
},
946+
{
947+
'body': rawData[2],
948+
'algo': HashAlgorithm.LEGACY,
949+
'hashfn': legacy_hash
950+
},
951+
{
952+
'body': rawData[3],
953+
'algo': HashAlgorithm.LEGACY,
954+
'hashfn': legacy_hash
955+
},
956+
{
957+
'body': rawData[4],
958+
'algo': HashAlgorithm.LEGACY,
959+
'hashfn': legacy_hash
960+
}
961+
]
955962

956963
def testAlgoHandlers(self):
957964
'''
@@ -972,31 +979,33 @@ def setUp(self):
972979
fn = join(dirname(__file__), 'algoSplits.json')
973980
with open(fn, 'r') as flo:
974981
rawData = json.load(flo)['splits']
975-
self._testData = [{
976-
'body': rawData[0],
977-
'algo': HashAlgorithm.LEGACY,
978-
'hashfn': legacy_hash
979-
},
980-
{
981-
'body': rawData[1],
982-
'algo': HashAlgorithm.MURMUR,
983-
'hashfn': _murmur_hash
984-
},
985-
{
986-
'body': rawData[2],
987-
'algo': HashAlgorithm.LEGACY,
988-
'hashfn': legacy_hash
989-
},
990-
{
991-
'body': rawData[3],
992-
'algo': HashAlgorithm.LEGACY,
993-
'hashfn': legacy_hash
994-
},
995-
{
996-
'body': rawData[4],
997-
'algo': HashAlgorithm.LEGACY,
998-
'hashfn': legacy_hash
999-
}]
982+
self._testData = [
983+
{
984+
'body': rawData[0],
985+
'algo': HashAlgorithm.LEGACY,
986+
'hashfn': legacy_hash
987+
},
988+
{
989+
'body': rawData[1],
990+
'algo': HashAlgorithm.MURMUR,
991+
'hashfn': _murmur_hash
992+
},
993+
{
994+
'body': rawData[2],
995+
'algo': HashAlgorithm.LEGACY,
996+
'hashfn': legacy_hash
997+
},
998+
{
999+
'body': rawData[3],
1000+
'algo': HashAlgorithm.LEGACY,
1001+
'hashfn': legacy_hash
1002+
},
1003+
{
1004+
'body': rawData[4],
1005+
'algo': HashAlgorithm.LEGACY,
1006+
'hashfn': legacy_hash
1007+
}
1008+
]
10001009

10011010
def testAlgoHandlers(self):
10021011
'''
@@ -1010,7 +1019,7 @@ def testAlgoHandlers(self):
10101019
self.assertEqual(get_hash_fn(split.algo), sp['hashfn'])
10111020

10121021

1013-
class TrafficAllocationTests(TestCase):
1022+
class TrafficAllocationTests(TestCase, MockUtilsMixin):
10141023
'''
10151024
'''
10161025

@@ -1067,10 +1076,14 @@ def setUp(self):
10671076
self._splitObjects['rollout2'] = split_parser.parse(raw_split, True)
10681077

10691078
raw_split['name'] = 'test4'
1070-
raw_split['trafficAllocation'] = None #must be mapped as 100
1079+
raw_split['trafficAllocation'] = None # must be mapped as 100
10711080
raw_split['trafficAllocationSeed'] = -1
10721081
self._splitObjects['rollout3'] = split_parser.parse(raw_split, True)
10731082

1083+
raw_split['name'] = 'test5'
1084+
raw_split['trafficAllocation'] = 99
1085+
raw_split['trafficAllocationSeed'] = -1
1086+
self._splitObjects['rollout4'] = split_parser.parse(raw_split, True)
10741087

10751088
def testTrafficAllocation(self):
10761089
'''
@@ -1093,11 +1106,18 @@ def testTrafficAllocation(self):
10931106
self.assertEqual(treatment2, 'on')
10941107

10951108
treatment3, label1 = self._client._get_treatment_for_split(
1096-
self._splitObjects['rollout2'], 'testKey', None
1109+
self._splitObjects['rollout3'], 'testKey', None
10971110
)
1098-
self.assertEqual(treatment3, 'default')
1111+
self.assertEqual(treatment3, 'on')
10991112

1113+
self.patch_object(Splitter, 'get_bucket', return_value=1)
11001114
treatment4, label1 = self._client._get_treatment_for_split(
1101-
self._splitObjects['rollout3'], 'testKey', None
1115+
self._splitObjects['rollout2'], 'testKey', None
11021116
)
11031117
self.assertEqual(treatment4, 'on')
1118+
1119+
self.patch_object(Splitter, 'get_bucket', return_value=100)
1120+
treatment5, label1 = self._client._get_treatment_for_split(
1121+
self._splitObjects['rollout4'], 'testKey', None
1122+
)
1123+
self.assertEqual(treatment5, 'default')

0 commit comments

Comments
 (0)