Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SDKS-7798] Add flag sets support for pluggable storage #65

Merged
merged 9 commits into from
Dec 12, 2023
6 changes: 6 additions & 0 deletions CHANGES.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
0.5.0 (December 12, 2023)
- Added support for Flag Sets, which enables grouping feature flags and interacting with the group rather than individually (more details in our documentation).
- Added a new Split Filter type named 'bySet'. This allows the Synchronizer to only synchronize the flags in the specified flag sets, avoiding unused or unwanted flags from being synced on the pluggable storage instance, bringing all the benefits of a reduced payload.
- Updated PluggableStorage to clear the storage before initiating the synchronization process, if it was previously synchronized with a different SDK key (i.e., a different environment) or different Split Filter criteria.
- Updated @splitsoftware/splitio-commons package to version 1.12.1, which includes support for flag sets, vulnerability fixes and other improvements.

0.4.0 (August 31, 2023)
- Updated @splitsoftware/splitio-commons package to version 1.9.0 that includes a minor improvement:
- Updated streaming architecture implementation to apply feature flag updates from the notification received which is now enhanced, improving efficiency and reliability of the whole update system.
Expand Down
5 changes: 5 additions & 0 deletions e2e/server/db.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
"changeNumber": 1619720346270,
"algo": 2,
"configurations": {},
"sets": ["set_a"],
"conditions": [
{
"conditionType": "ROLLOUT",
Expand Down Expand Up @@ -97,6 +98,7 @@
"changeNumber": 1619205925116,
"algo": 2,
"configurations": {},
"sets": ["set_b"],
"conditions": [
{
"conditionType": "WHITELIST",
Expand Down Expand Up @@ -197,6 +199,7 @@
"configurations": {
"on": "{\"ojoijoii\":\"oijoijioj\",\"\":\"\"}"
},
"sets": ["set_a", "set_b"],
"conditions": [
{
"conditionType": "ROLLOUT",
Expand Down Expand Up @@ -246,6 +249,7 @@
"changeNumber": 1619205566698,
"algo": 2,
"configurations": {},
"sets": [],
"conditions": [
{
"conditionType": "WHITELIST",
Expand Down Expand Up @@ -430,6 +434,7 @@
"changeNumber": 1619720346272,
"algo": 2,
"configurations": {},
"sets": ["set_c"],
"conditions": [
{
"conditionType": "ROLLOUT",
Expand Down
130 changes: 112 additions & 18 deletions e2e/synchronizer.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,18 @@ describe('Synchronizer e2e tests', () => {
expect(Number(ttTest)).toBe(1);
expect(Number(ttUser)).toBe(3);
});

test('saves flag set keys', async () => {
const flagSets = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.flagSet.*`);
const itemsSetA = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_a`);
const itemsSetB = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_b`);
const itemsInexistentSet = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.inexistent_set`);

expect(flagSets.sort()).toEqual([`${REDIS_PREFIX}.flagSet.set_a`, `${REDIS_PREFIX}.flagSet.set_b`]);
expect(itemsSetA.sort()).toEqual(['MATIAS_TEST', 'TEST_DOC']);
expect(itemsSetB.sort()).toEqual(['TEST_DOC', 'TEST_MATIAS']);
expect(itemsInexistentSet).toEqual([]);
});
});

describe('Runs SDK Consumer with DEBUG impressions mode, and', () => {
Expand Down Expand Up @@ -138,6 +150,18 @@ describe('Synchronizer e2e tests', () => {
expect(Number(ttUser)).toBe(2);
});

test('saves flag set keys', async () => {
const flagSets = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.flagSet.*`);
const itemsSetA = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_a`);
const itemsSetB = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_b`);
const itemsSetC = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_c`);

expect(flagSets.sort()).toEqual([`${REDIS_PREFIX}.flagSet.set_b`, `${REDIS_PREFIX}.flagSet.set_c`]);
expect(itemsSetA).toEqual([]);
expect(itemsSetB).toEqual(['TEST_MATIAS']);
expect(itemsSetC).toEqual(['MATIAS_TEST']);
});

test('checks that [0] impressions are saved in Redis', async () => {
const impressions = await _redisWrapper.getItemsCount(`${REDIS_PREFIX}.impressions`);
expect(impressions).toBe(0);
Expand Down Expand Up @@ -247,10 +271,11 @@ describe('Synchronizer e2e tests', () => {

});

describe('Synchronizer e2e tests - InMemoryOperation - only Splits & Segments mode', () => {
describe('Synchronizer e2e tests - OPTIMIZED impressions mode & Flag Sets filter - only Splits & Segments mode', () => {
/**
* Settings creation.
*/
const redisWrapper = redisAdapterWrapper({ url: REDIS_URL });
const settings: ISynchronizerSettings = {
core: {
authorizationKey: 'fakeSdkKeyForTesting',
Expand All @@ -263,17 +288,20 @@ describe('Synchronizer e2e tests - InMemoryOperation - only Splits & Segments mo
storage: {
type: 'PLUGGABLE',
prefix: PREFIX,
wrapper: redisAdapterWrapper({ url: REDIS_URL }),
wrapper: redisWrapper,
},
sync: {
impressionsMode: 'OPTIMIZED',
splitFilters: [{
type: 'bySet',
values: ['set_b'],
}],
},
scheduler: {
// @ts-ignore. Not part of public API
synchronizerMode: 'MODE_RUN_FEATURE_FLAGS_AND_SEGMENTS',
},
logger: 'NONE',
streamingEnabled: false,
};

const _synchronizer = new Synchronizer(settings);
Expand All @@ -292,11 +320,11 @@ describe('Synchronizer e2e tests - InMemoryOperation - only Splits & Segments mo
await _synchronizer.execute();
});

test('saves [4] Splits as keys in Redis', async () => {
test('saves [2] Splits as keys in Redis that matches the flag sets filter', async () => {
const splits = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.split.*`);

// Check changeNumber(...71)
expect(splits).toHaveLength(4);
expect(splits.sort()).toEqual([`${REDIS_PREFIX}.split.TEST_DOC`, `${REDIS_PREFIX}.split.TEST_MATIAS`]);
});

test('saves new changeNumber value', async () => {
Expand All @@ -305,24 +333,36 @@ describe('Synchronizer e2e tests - InMemoryOperation - only Splits & Segments mo
expect(till).toBe('1619720346271');
});

test('saves [2] Segments as keys in Redis', async () => {
test('saves [1] Segments as keys in Redis', async () => {
const segments = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.segment.*`);
const segmentsRegistered = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.segments.*`);

expect(segments.filter(x => !x.match(/.till$/))).toHaveLength(2);
expect(segments.filter(x => !x.match(/.till$/))).toHaveLength(1);
expect(segmentsRegistered).toHaveLength(1);
});

test('saves [2] Traffic Types keys', async () => {
test('saves [1] Traffic Types key', async () => {
const trafficTypes = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.trafficType.*`);
const ttAccount = await _redisWrapper.get(`${REDIS_PREFIX}.trafficType.account`);
const ttTest = await _redisWrapper.get(`${REDIS_PREFIX}.trafficType.testTT`);
const ttUser = await _redisWrapper.get(`${REDIS_PREFIX}.trafficType.user`);

expect(trafficTypes).toHaveLength(2);
expect(trafficTypes).toHaveLength(1);
expect(Number(ttAccount)).toBe(0);
expect(Number(ttTest)).toBe(1);
expect(Number(ttUser)).toBe(3);
expect(Number(ttTest)).toBe(0);
expect(Number(ttUser)).toBe(2);
});

test('saves flag set keys considering the flag sets filter', async () => {
const flagSets = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.flagSet.*`);
const itemsSetA = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_a`);
const itemsSetB = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_b`);
const itemsInexistentSet = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.inexistent_set`);

expect(flagSets).toEqual([`${REDIS_PREFIX}.flagSet.set_b`]);
expect(itemsSetA).toEqual([]);
expect(itemsSetB.sort()).toEqual(['TEST_DOC', 'TEST_MATIAS']);
expect(itemsInexistentSet).toEqual([]);
});
});

Expand All @@ -331,10 +371,10 @@ describe('Synchronizer e2e tests - InMemoryOperation - only Splits & Segments mo
await _synchronizer.execute();
});

test('runs again and saves [17] Splits as keys in Redis', async () => {
test('runs again and saves [1] Split as keys in Redis', async () => {
const splits = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.split.*`);

expect(splits).toHaveLength(4);
expect(splits).toEqual([`${REDIS_PREFIX}.split.TEST_MATIAS`]);
});

test('saves new changeNumber value', async () => {
Expand All @@ -343,20 +383,74 @@ describe('Synchronizer e2e tests - InMemoryOperation - only Splits & Segments mo
expect(till).toBe('1619720346272');
});

test('updates [4] Traffic Types keys\' values', async () => {
test('updates [1] Traffic Types keys values', async () => {
const trafficTypes = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.trafficType.*`);
const ttAccount = await _redisWrapper.get(`${REDIS_PREFIX}.trafficType.account`);
const ttTest = await _redisWrapper.get(`${REDIS_PREFIX}.trafficType.testTT`);
const ttUser = await _redisWrapper.get(`${REDIS_PREFIX}.trafficType.user`);

expect(trafficTypes).toHaveLength(2);
expect(Number(ttAccount)).toBe(2);
expect(trafficTypes).toHaveLength(1);
expect(Number(ttAccount)).toBe(0);
expect(Number(ttTest)).toBe(0);
expect(Number(ttUser)).toBe(2);
expect(Number(ttUser)).toBe(1);
});

test('saves flag set keys', async () => {
const flagSets = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.flagSet.*`);
const itemsSetA = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_a`);
const itemsSetB = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_b`);
const itemsSetC = await _redisWrapper.getItems(`${REDIS_PREFIX}.flagSet.set_c`);

expect(flagSets).toEqual([`${REDIS_PREFIX}.flagSet.set_b`]);
expect(itemsSetA).toEqual([]);
expect(itemsSetB).toEqual(['TEST_MATIAS']);
expect(itemsSetC).toEqual([]);
});
});

test('Synchronizer runs a 3rd time with same SDK key and filter criteria, but wrong URLs. Execution should fail and storage should not be updated', async () => {
const keys = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.`);

const synchronizer = new Synchronizer({
...settings,
sync: {
// The final filter query after validation is `&sets=set_b`
splitFilters: [{
type: 'bySet', values: ['set_b', ' '],
}, {
type: 'byName', values: ['set_b'],
}],
},
urls: {
sdk: SERVER_MOCK_URL + '/invalidpath',
events: SERVER_MOCK_URL + '/invalidpath',
telemetry: SERVER_MOCK_URL + '/invalidpath',
},
});

expect(await synchronizer.execute()).toBe(false);
expect(await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.`)).toEqual(keys);
});

// @todo test remove split by fetching `archive` status
test('Synchronizer runs a 4th time with a different SDK key and wrong URLs. Execution should fail and storage should be empty, except for storage hash', async () => {
const keys = await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.`);

const synchronizer = new Synchronizer({
...settings,
core: {
authorizationKey: 'fakeSdkKeyForTesting-2',
},
urls: {
sdk: SERVER_MOCK_URL + '/invalidpath',
events: SERVER_MOCK_URL + '/invalidpath',
telemetry: SERVER_MOCK_URL + '/invalidpath',
},
});

expect(await synchronizer.execute()).toBe(false);
expect(keys.length).toBeGreaterThan(0);
expect(await _redisWrapper.getKeysByPrefix(`${REDIS_PREFIX}.`)).toEqual([`${REDIS_PREFIX}.hash`]);
});
});

describe('Synchronizer - only Splits & Segments mode', () => {
Expand Down
Loading