This repository was archived by the owner on May 30, 2024. It is now read-only.
Merged
28 commits
926bee2 support segments (eli-darkly, Jan 20, 2018)
795fb3e Merge branch 'master' into eb/segments (eli-darkly, Jan 20, 2018)
ab74820 Merge branch 'segments' into eb/segments (eli-darkly, Feb 1, 2018)
2957f76 genericize feature store (eli-darkly, Feb 1, 2018)
5b71a1a misc fixes (eli-darkly, Feb 1, 2018)
997f4ef typo (eli-darkly, Feb 1, 2018)
134ac1e fix package refs (eli-darkly, Feb 1, 2018)
d55f6c7 misc fixes (eli-darkly, Feb 1, 2018)
64a5b2c unit tests for evaluating segments (eli-darkly, Feb 1, 2018)
842ce64 update stream/poll processors for feature store changes (eli-darkly, Feb 1, 2018)
175b961 more fixes to stream and store wrapper (eli-darkly, Feb 1, 2018)
77333d3 can't use object as object key (eli-darkly, Feb 2, 2018)
5da08c5 misc fixes (eli-darkly, Feb 2, 2018)
cf93ed4 fix tests (eli-darkly, Feb 2, 2018)
07c759b make callback null-tolerant (eli-darkly, Feb 2, 2018)
6086bec add temp debug logging (eli-darkly, Feb 2, 2018)
f8687a8 further cleanup of streaming & polling logic (eli-darkly, Feb 2, 2018)
4c060ef fix indirect patch (eli-darkly, Feb 2, 2018)
14a3fb2 misc fixes to feature store (eli-darkly, Feb 2, 2018)
2d7fb5c feature store unit tests (eli-darkly, Feb 2, 2018)
bff9dd6 Merge branch 'segments' into eb/segments (eli-darkly, Feb 2, 2018)
10623ac comment (eli-darkly, Feb 2, 2018)
606e8fe Merge branch 'segments' into eb/segments (eli-darkly, Feb 2, 2018)
0bce620 fix tests to be async (eli-darkly, Feb 2, 2018)
662bae4 Merge pull request #47 from launchdarkly/eb/segments (eli-darkly, Feb 6, 2018)
2c058a4 Merge branch 'master' into segments (eli-darkly, Feb 6, 2018)
087e025 Merge branch 'master' into segments (eli-darkly, Feb 21, 2018)
543f73d version 4.0.0 (eli-darkly, Feb 21, 2018)
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -3,6 +3,13 @@
All notable changes to the LaunchDarkly Node.js SDK will be documented in this file. This project adheres
to [Semantic Versioning](http://semver.org).

## [4.0.0] - 2018-02-21
### Added
- Support for a new LaunchDarkly feature: reusable user segments.

### Changed
- The feature store interface has been changed to support user segment data as well as feature flags. Existing code that uses `RedisFeatureStore` should work as before, but custom feature store implementations will need to be updated.

## [3.4.0] - 2018-02-13
### Added
- Adds support for a future LaunchDarkly feature, coming soon: semantic version user attributes.
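The 4.0.0 note above about custom feature stores is the main migration point: every store method now takes a data-kind argument (features or segments) alongside the key, and init() receives the full data set for all kinds at once. The skeleton below is a rough sketch of that shape, inferred from the InMemoryFeatureStore and event-wrapper diffs further down; it is not an official template, and the initialized/close signatures in particular are assumptions.

// Rough sketch of the store shape a custom implementation now needs (inferred from this PR,
// not an official template). "kind" is an object from versioned_data_kind, e.g.
// dataKind.features or dataKind.segments, each carrying its own namespace string.
function MyCustomFeatureStore() {
  var allData = {};      // { [kind.namespace]: { [key]: item } }
  var initCalled = false;
  var store = {};

  store.init = function(newData, cb) {           // replace the entire data set (all kinds)
    allData = newData;
    initCalled = true;
    cb && cb();
  };
  store.get = function(kind, key, cb) {          // one item of the given kind, or null
    var items = allData[kind.namespace] || {};
    var item = items[key];
    cb(item && !item.deleted ? item : null);
  };
  store.all = function(kind, cb) {               // all non-deleted items of the given kind
    var items = allData[kind.namespace] || {};
    var results = {};
    for (var key in items) {
      if (items[key] && !items[key].deleted) { results[key] = items[key]; }
    }
    cb(results);
  };
  store.upsert = function(kind, item, cb) {      // write only if the version is newer
    var items = allData[kind.namespace] = allData[kind.namespace] || {};
    var old = items[item.key];
    if (!old || old.version < item.version) { items[item.key] = item; }
    cb && cb();
  };
  store.delete = function(kind, key, version, cb) {  // tombstone rather than remove
    store.upsert(kind, { key: key, version: version, deleted: true }, cb);
  };
  store.initialized = function(cb) { cb(initCalled); };  // signature assumed, not shown in the diff
  store.close = function() {};                           // assumed to be a no-op for in-process stores
  return store;
}

Note that the SDK's own InMemoryFeatureStore (below) additionally defers every callback with setTimeout so results are always delivered asynchronously; a real custom store should follow the same convention.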
123 changes: 97 additions & 26 deletions evaluate_flag.js
@@ -1,4 +1,5 @@
var operators = require('./operators');
var dataKind = require('./versioned_data_kind');
var util = require('util');
var sha1 = require('node-sha1');
var async = require('async');
@@ -7,7 +8,7 @@ var builtins = ['key', 'ip', 'country', 'email', 'firstName', 'lastName', 'avata

var noop = function(){};

function evaluate(flag, user, store, cb) {
function evaluate(flag, user, featureStore, cb) {
cb = cb || noop;
if (!user || user.key === null || user.key === undefined) {
cb(null, null, null);
@@ -31,7 +32,7 @@ function evaluate(flag, user, store, cb) {
return;
}

eval_internal(flag, user, store, [], function(err, result, events) {
eval_internal(flag, user, featureStore, [], function(err, result, events) {
if (err) {
cb(err, result, events);
return;
@@ -53,19 +54,19 @@ function evaluate(flag, user, store, cb) {
return;
}

function eval_internal(flag, user, store, events, cb) {
function eval_internal(flag, user, featureStore, events, cb) {
// Evaluate prerequisites, if any
if (flag.prerequisites) {
async.mapSeries(flag.prerequisites,
function(prereq, callback) {
store.get(prereq.key, function(f) {
featureStore.get(dataKind.features, prereq.key, function(f) {
// If the flag does not exist in the store or is not on, the prerequisite
// is not satisfied
if (!f || !f.on) {
callback(new Error("Unsatisfied prerequisite"), null);
return;
}
eval_internal(f, user, store, events, function(err, value) {
eval_internal(f, user, featureStore, events, function(err, value) {
// If there was an error, the value is null, the variation index is out of range,
// or the value does not match the indexed variation, the prerequisite is not satisfied
var variation = get_variation(f, prereq.variation);
@@ -86,18 +87,18 @@ function eval_internal(flag, user, store, events, cb) {
cb(null, null, events);
return;
}
evalRules(flag, user, function(e, variation) {
evalRules(flag, user, featureStore, function(e, variation) {
cb(e, variation, events);
});
})
} else {
evalRules(flag, user, function(e, variation) {
evalRules(flag, user, featureStore, function(e, variation) {
cb(e, variation, events);
});
}
}

function evalRules(flag, user, cb) {
function evalRules(flag, user, featureStore, cb) {
var i, j;
var target;
var variation;
@@ -119,38 +120,73 @@ function evalRules(flag, user, cb) {
}
}

// Check rules
for (i = 0; i < flag.rules.length; i++) {
rule = flag.rules[i];
if (rule_match_user(rule, user)) {
variation = variation_for_user(rule, user, flag);
cb(variation === null ? new Error("Undefined variation for flag " + flag.key) : null, variation);
return;
async.mapSeries(flag.rules,
function(rule, callback) {
rule_match_user(rule, user, featureStore, function(matched) {
callback(matched ? rule : null, null);
});
},
function(err, results) {
// we use the "error" value to indicate that a rule was successfully matched (since we only care
// about the first match, and mapSeries terminates on the first "error")
if (err) {
var rule = err;
variation = variation_for_user(rule, user, flag);
cb(variation === null ? new Error("Undefined variation for flag " + flag.key) : null, variation);
} else {
// no rule matched; check the fallthrough
variation = variation_for_user(flag.fallthrough, user, flag);
cb(variation === null ? new Error("Undefined variation for flag " + flag.key) : null, variation);
}
}
}

// Check the fallthrough
variation = variation_for_user(flag.fallthrough, user, flag);
cb(variation === null ? new Error("Undefined variation for flag " + flag.key) : null, variation);
);
}

function rule_match_user(r, user) {
function rule_match_user(r, user, featureStore, cb) {
var i;

if (!r.clauses) {
return false;
}

// A rule matches if all its clauses match
for (i = 0; i < r.clauses.length; i++) {
if (!clause_match_user(r.clauses[i], user)) {
return false;
async.mapSeries(r.clauses,
function(clause, callback) {
clause_match_user(clause, user, featureStore, function(matched) {
// on the first clause that does *not* match, we raise an "error" to stop the loop
callback(matched ? null : clause, null);
});
},
function(err, results) {
cb(!err);
}
);
}

function clause_match_user(c, user, featureStore, cb) {
if (c.op == 'segmentMatch') {
async.mapSeries(c.values,
function(value, callback) {
featureStore.get(dataKind.segments, value, function(segment) {
if (segment && segment_match_user(segment, user)) {
// on the first segment that matches, we raise an "error" to stop the loop
callback(segment, null);
} else {
callback(null, null);
}
});
},
function(err, results) {
// an "error" indicates that a segment *did* match
cb(maybe_negate(c, !!err));
}
);
} else {
cb(clause_match_user_no_segments(c, user));
}
return true;
}

function clause_match_user(c, user) {
function clause_match_user_no_segments(c, user) {
var uValue;
var matchFn;
var i;
@@ -176,6 +212,41 @@ function clause_match_user(c, user) {
return maybe_negate(c, match_any(matchFn, uValue, c.values));
}

function segment_match_user(segment, user) {
if (user.key) {
if ((segment.included || []).indexOf(user.key) >= 0) {
return true;
}
if ((segment.excluded || []).indexOf(user.key) >= 0) {
return false;
}
for (var i = 0; i < (segment.rules || []).length; i++) {
if (segment_rule_match_user(segment.rules[i], user, segment.key, segment.salt)) {
return true;
}
}
}
return false;
}

function segment_rule_match_user(rule, user, segmentKey, salt) {
for (var i = 0; i < (rule.clauses || []).length; i++) {
if (!clause_match_user_no_segments(rule.clauses[i], user)) {
return false;
}
}

// If the weight is absent, this rule matches
if (rule.weight === undefined || rule.weight === null) {
return true;
}

// All of the clauses are met. See if the user buckets in
var bucket = bucket_user(user, segmentKey, rule.bucketBy || "key", salt);
var weight = rule.weight / 100000.0;
return bucket < weight;
}

function maybe_negate(c, b) {
if (c.negate) {
return !b;
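The rewritten rule and clause loops in this file lean on a property of async.mapSeries: the series stops as soon as an iteratee reports an "error", so the code passes the matched rule (or the matched segment, or the first non-matching clause) as the "error" value to short-circuit the iteration. Below is a minimal standalone sketch of that first-match pattern, with illustrative names rather than SDK code.

// Find the first item that satisfies an async predicate, using async.mapSeries's
// stop-on-error behavior as a short-circuit. Illustrative only; works as long as
// the matched items are truthy values.
var async = require('async');

function firstMatch(items, asyncPredicate, cb) {
  async.mapSeries(items,
    function(item, callback) {
      asyncPredicate(item, function(matched) {
        // Passing the item as the "error" halts the series at the first match.
        callback(matched ? item : null, null);
      });
    },
    function(err, results) {
      // err is the matched item, or null if nothing matched.
      cb(err || null);
    }
  );
}

// Example: find the first even number, with an artificially async predicate.
firstMatch([1, 3, 4, 5],
  function(n, done) { setImmediate(function() { done(n % 2 === 0); }); },
  function(match) { console.log(match); /* -> 4 */ });

The diff above uses the same trick three times: rule evaluation stops at the first matching rule, clause evaluation stops at the first clause that does not match, and segmentMatch evaluation stops at the first segment that matches.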
74 changes: 44 additions & 30 deletions feature_store.js
@@ -1,74 +1,88 @@
// An in-memory feature store with an async interface.
var dataKind = require('./versioned_data_kind');

// An in-memory store with an async interface.
// It's async as other implementations (e.g. the RedisFeatureStore)
// may be async, and we want to retain interface compatibility.
var noop = function(){};
function InMemoryFeatureStore() {
var store = {flags:{}};
var store = {allData:{}};

function callbackResult(cb, result) {
cb = cb || noop;
setTimeout(function() { cb(result); }, 0); // ensure this is dispatched asynchronously
}

store.get = function(key, cb) {
if (this.flags.hasOwnProperty(key)) {
var flag = this.flags[key];
store.get = function(kind, key, cb) {
var items = this.allData[kind.namespace] || {};
if (Object.hasOwnProperty.call(items, key)) {
var item = items[key];

if (!flag || flag.deleted) {
if (!item || item.deleted) {
callbackResult(cb, null);
} else {
callbackResult(cb, clone(flag));
callbackResult(cb, clone(item));
}
} else {
cb(null);
callbackResult(cb, null);
}
}

store.all = function(cb) {
store.all = function(kind, cb) {
var results = {};
var items = this.allData[kind.namespace] || {};

for (var key in this.flags) {
if (this.flags.hasOwnProperty(key)) {
var flag = this.flags[key];
if (flag && !flag.deleted) {
results[key] = clone(flag);
for (var key in items) {
if (Object.hasOwnProperty.call(items, key)) {
var item = items[key];
if (item && !item.deleted) {
results[key] = clone(item);
}
}
}

callbackResult(cb, results);
}

store.init = function(flags, cb) {
this.flags = flags;
store.init = function(allData, cb) {
this.allData = allData;
this.init_called = true;
callbackResult(cb);
}

store.delete = function(key, version, cb) {
store.delete = function(kind, key, version, cb) {
var items = this.allData[kind.namespace];
if (!items) {
items = {};
this.allData[kind.namespace] = items;
}
var deletedItem = { version: version, deleted: true };
if (this.flags.hasOwnProperty(key)) {
var old = this.flags[key];
if (old && old.version < version) {
this.flags[key] = deletedItem;
if (Object.hasOwnProperty.call(items, key)) {
var old = items[key];
if (!old || old.version < version) {
items[key] = deletedItem;
}
} else {
this.flags[key] = deletedItem;
items[key] = deletedItem;
}

callbackResult(cb);
}

store.upsert = function(key, flag, cb) {
var old = this.flags[key];
store.upsert = function(kind, item, cb) {
var key = item.key;
var items = this.allData[kind.namespace];
if (!items) {
items = {};
this.allData[kind.namespace] = items;
}

if (this.flags.hasOwnProperty(key)) {
var old = this.flags[key];
if (old && old.version < flag.version) {
this.flags[key] = flag;
if (Object.hasOwnProperty.call(items, key)) {
var old = items[key];
if (old && old.version < item.version) {
items[key] = item;
}
} else {
this.flags[key] = flag;
items[key] = item;
}

callbackResult(cb);
@@ -91,4 +105,4 @@ function clone(obj) {
return JSON.parse(JSON.stringify(obj));
}

module.exports = InMemoryFeatureStore;
module.exports = InMemoryFeatureStore;
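A quick usage sketch of the genericized in-memory store: data is partitioned by each kind's namespace, so a flag and a segment with the same key no longer collide. The kind objects below are stand-ins for what ./versioned_data_kind exports; the exact namespace strings are assumed for illustration and are not shown in this diff.

var InMemoryFeatureStore = require('./feature_store');

// Stand-ins for require('./versioned_data_kind').features / .segments.
var featuresKind = { namespace: 'features' };
var segmentsKind = { namespace: 'segments' };

// Assumes the module exports a factory that returns the store object, as above.
var store = InMemoryFeatureStore();

// init() now takes the whole data set, keyed first by namespace, then by item key.
store.init({
  features: { 'checkout-redesign': { key: 'checkout-redesign', version: 2, on: true } },
  segments: { 'beta-testers': { key: 'beta-testers', version: 1, included: ['user-1'] } }
});

store.get(featuresKind, 'checkout-redesign', function(flag) {
  console.log(flag && flag.on);             // true
});
store.get(segmentsKind, 'beta-testers', function(segment) {
  console.log(segment && segment.included); // [ 'user-1' ]
});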
29 changes: 18 additions & 11 deletions feature_store_event_wrapper.js
@@ -1,3 +1,5 @@
var dataKind = require('./versioned_data_kind');

function FeatureStoreEventWrapper(featureStore, emitter) {
function differ(key, oldValue, newValue) {
if(newValue && oldValue && newValue.version < oldValue.version) return;
@@ -13,10 +15,11 @@ function FeatureStoreEventWrapper(featureStore, emitter) {
initialized: featureStore.initialized.bind(featureStore),
close: featureStore.close.bind(featureStore),

init: function(newFlags, callback) {
featureStore.all(function(oldFlags){
featureStore.init(newFlags, function(){
init: function(newData, callback) {
featureStore.all(dataKind.features, function(oldFlags){
featureStore.init(newData, function(){
var allFlags = {};
var newFlags = newData[dataKind.features.namespace] || {};
Object.assign(allFlags, oldFlags, newFlags);
var handledFlags = {};

@@ -31,19 +34,23 @@
});
},

delete: function(key, version, callback) {
featureStore.get(key, function(oldFlag) {
featureStore.delete(key, version, function() {
differ(key, oldFlag, {});
delete: function(kind, key, version, callback) {
featureStore.get(kind, key, function(oldFlag) {
featureStore.delete(kind, key, version, function() {
if (kind === dataKind.features) {
differ(key, oldFlag, {});
}
callback && callback.apply(null, arguments);
});
});
},

upsert: function(key, newFlag, callback) {
featureStore.get(key, function(oldFlag) {
featureStore.upsert(key, newFlag, function() {
differ(key, oldFlag, newFlag);
upsert: function(kind, newItem, callback) {
featureStore.get(kind, newItem.key, function(oldItem) {
featureStore.upsert(kind, newItem, function() {
if (kind === dataKind.features) {
differ(oldItem ? oldItem.key : null, oldItem, newItem);
}
callback && callback.apply(null, arguments);
});
});
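The wrapper above keeps change events flag-only: segment writes pass straight through, while feature-flag writes are compared against the previous value before anything is emitted. The decorator below is a rough illustration of that pattern; the 'update' event name and the exact version comparison are invented for the example and are not taken from the SDK.

var EventEmitter = require('events').EventEmitter;

// Decorator sketch: delegate to an underlying kind-aware store and emit a change
// event when a feature flag is added or updated. Segment writes are passed through
// untouched, mirroring the kind check in the wrapper above.
function FeatureStoreEventSketch(featureStore, featuresKind) {
  var emitter = new EventEmitter();
  return {
    on: emitter.on.bind(emitter),
    get: featureStore.get.bind(featureStore),
    all: featureStore.all.bind(featureStore),
    init: featureStore.init.bind(featureStore),
    delete: featureStore.delete.bind(featureStore),
    upsert: function(kind, item, callback) {
      featureStore.get(kind, item.key, function(oldItem) {
        featureStore.upsert(kind, item, function() {
          if (kind === featuresKind && (!oldItem || oldItem.version < item.version)) {
            emitter.emit('update', item.key);  // hypothetical event name
          }
          callback && callback();
        });
      });
    }
  };
}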