diff --git a/CHANGELOG.md b/CHANGELOG.md index d52b6a3..29f7a92 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,13 @@ All notable changes to the LaunchDarkly Node.js SDK will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org). +## [4.0.0] - 2018-02-21 +### Added +- Support for a new LaunchDarkly feature: reusable user segments. + +### Changed +- The feature store interface has been changed to support user segment data as well as feature flags. Existing code that uses `RedisFeatureStore` should work as before, but custom feature store implementations will need to be updated. + ## [3.4.0] - 2018-02-13 ### Added - Adds support for a future LaunchDarkly feature, coming soon: semantic version user attributes. diff --git a/evaluate_flag.js b/evaluate_flag.js index 372c0af..ca38a3e 100644 --- a/evaluate_flag.js +++ b/evaluate_flag.js @@ -1,4 +1,5 @@ var operators = require('./operators'); +var dataKind = require('./versioned_data_kind'); var util = require('util'); var sha1 = require('node-sha1'); var async = require('async'); @@ -7,7 +8,7 @@ var builtins = ['key', 'ip', 'country', 'email', 'firstName', 'lastName', 'avata var noop = function(){}; -function evaluate(flag, user, store, cb) { +function evaluate(flag, user, featureStore, cb) { cb = cb || noop; if (!user || user.key === null || user.key === undefined) { cb(null, null, null); @@ -31,7 +32,7 @@ function evaluate(flag, user, store, cb) { return; } - eval_internal(flag, user, store, [], function(err, result, events) { + eval_internal(flag, user, featureStore, [], function(err, result, events) { if (err) { cb(err, result, events); return; @@ -53,19 +54,19 @@ function evaluate(flag, user, store, cb) { return; } -function eval_internal(flag, user, store, events, cb) { +function eval_internal(flag, user, featureStore, events, cb) { // Evaluate prerequisites, if any if (flag.prerequisites) { async.mapSeries(flag.prerequisites, function(prereq, callback) { - 
store.get(prereq.key, function(f) { + featureStore.get(dataKind.features, prereq.key, function(f) { // If the flag does not exist in the store or is not on, the prerequisite // is not satisfied if (!f || !f.on) { callback(new Error("Unsatisfied prerequisite"), null); return; } - eval_internal(f, user, store, events, function(err, value) { + eval_internal(f, user, featureStore, events, function(err, value) { // If there was an error, the value is null, the variation index is out of range, // or the value does not match the indexed variation the prerequisite is not satisfied var variation = get_variation(f, prereq.variation); @@ -86,18 +87,18 @@ function eval_internal(flag, user, store, events, cb) { cb(null, null, events); return; } - evalRules(flag, user, function(e, variation) { + evalRules(flag, user, featureStore, function(e, variation) { cb(e, variation, events); }); }) } else { - evalRules(flag, user, function(e, variation) { + evalRules(flag, user, featureStore, function(e, variation) { cb(e, variation, events); }); } } -function evalRules(flag, user, cb) { +function evalRules(flag, user, featureStore, cb) { var i, j; var target; var variation; @@ -119,22 +120,29 @@ function evalRules(flag, user, cb) { } } - // Check rules - for (i = 0; i < flag.rules.length; i++) { - rule = flag.rules[i]; - if (rule_match_user(rule, user)) { - variation = variation_for_user(rule, user, flag); - cb(variation === null ? new Error("Undefined variation for flag " + flag.key) : null, variation); - return; + async.mapSeries(flag.rules, + function(rule, callback) { + rule_match_user(rule, user, featureStore, function(matched) { + callback(matched ? 
rule : null, null); + }); + }, + function(err, results) { + // we use the "error" value to indicate that a rule was successfully matched (since we only care + // about the first match, and mapSeries terminates on the first "error") + if (err) { + var rule = err; + variation = variation_for_user(rule, user, flag); + cb(variation === null ? new Error("Undefined variation for flag " + flag.key) : null, variation); + } else { + // no rule matched; check the fallthrough + variation = variation_for_user(flag.fallthrough, user, flag); + cb(variation === null ? new Error("Undefined variation for flag " + flag.key) : null, variation); + } } - } - - // Check the fallthrough - variation = variation_for_user(flag.fallthrough, user, flag); - cb(variation === null ? new Error("Undefined variation for flag " + flag.key) : null, variation); + ); } -function rule_match_user(r, user) { +function rule_match_user(r, user, featureStore, cb) { var i; if (!r.clauses) { @@ -142,15 +150,43 @@ function rule_match_user(r, user) { } // A rule matches if all its clauses match - for (i = 0; i < r.clauses.length; i++) { - if (!clause_match_user(r.clauses[i], user)) { - return false; + async.mapSeries(r.clauses, + function(clause, callback) { + clause_match_user(clause, user, featureStore, function(matched) { + // on the first clause that does *not* match, we raise an "error" to stop the loop + callback(matched ? 
null : clause, null); + }); + }, + function(err, results) { + cb(!err); } + ); +} + +function clause_match_user(c, user, featureStore, cb) { + if (c.op == 'segmentMatch') { + async.mapSeries(c.values, + function(value, callback) { + featureStore.get(dataKind.segments, value, function(segment) { + if (segment && segment_match_user(segment, user)) { + // on the first segment that matches, we raise an "error" to stop the loop + callback(segment, null); + } else { + callback(null, null); + } + }); + }, + function(err, results) { + // an "error" indicates that a segment *did* match + cb(maybe_negate(c, !!err)); + } + ); + } else { + cb(clause_match_user_no_segments(c, user)); } - return true; } -function clause_match_user(c, user) { +function clause_match_user_no_segments(c, user) { var uValue; var matchFn; var i; @@ -176,6 +212,41 @@ function clause_match_user(c, user) { return maybe_negate(c, match_any(matchFn, uValue, c.values)); } +function segment_match_user(segment, user) { + if (user.key) { + if ((segment.included || []).indexOf(user.key) >= 0) { + return true; + } + if ((segment.excluded || []).indexOf(user.key) >= 0) { + return false; + } + for (var i = 0; i < (segment.rules || []).length; i++) { + if (segment_rule_match_user(segment.rules[i], user, segment.key, segment.salt)) { + return true; + } + } + } + return false; +} + +function segment_rule_match_user(rule, user, segmentKey, salt) { + for (var i = 0; i < (rule.clauses || []).length; i++) { + if (!clause_match_user_no_segments(rule.clauses[i], user)) { + return false; + } + } + + // If the weight is absent, this rule matches + if (rule.weight === undefined || rule.weight === null) { + return true; + } + + // All of the clauses are met. 
See if the user buckets in + var bucket = bucket_user(user, segmentKey, rule.bucketBy || "key", salt); + var weight = rule.weight / 100000.0; + return bucket < weight; +} + function maybe_negate(c, b) { if (c.negate) { return !b; diff --git a/feature_store.js b/feature_store.js index bc9cdd3..35eebb0 100644 --- a/feature_store.js +++ b/feature_store.js @@ -1,37 +1,41 @@ -// An in-memory feature store with an async interface. +var dataKind = require('./versioned_data_kind'); + +// An in-memory store with an async interface. // It's async as other implementations (e.g. the RedisFeatureStore) // may be async, and we want to retain interface compatibility. var noop = function(){}; function InMemoryFeatureStore() { - var store = {flags:{}}; + var store = {allData:{}}; function callbackResult(cb, result) { cb = cb || noop; setTimeout(function() { cb(result); }, 0); // ensure this is dispatched asynchronously } - store.get = function(key, cb) { - if (this.flags.hasOwnProperty(key)) { - var flag = this.flags[key]; + store.get = function(kind, key, cb) { + var items = this.allData[kind.namespace] || {}; + if (Object.hasOwnProperty.call(items, key)) { + var item = items[key]; - if (!flag || flag.deleted) { + if (!item || item.deleted) { callbackResult(cb, null); } else { - callbackResult(cb, clone(flag)); + callbackResult(cb, clone(item)); } } else { - cb(null); + callbackResult(cb, null); } } - store.all = function(cb) { + store.all = function(kind, cb) { var results = {}; + var items = this.allData[kind.namespace] || {}; - for (var key in this.flags) { - if (this.flags.hasOwnProperty(key)) { - var flag = this.flags[key]; - if (flag && !flag.deleted) { - results[key] = clone(flag); + for (var key in items) { + if (Object.hasOwnProperty.call(items, key)) { + var item = items[key]; + if (item && !item.deleted) { + results[key] = clone(item); } } } @@ -39,36 +43,46 @@ function InMemoryFeatureStore() { callbackResult(cb, results); } - store.init = function(flags, cb) { - 
this.flags = flags; + store.init = function(allData, cb) { + this.allData = allData; this.init_called = true; callbackResult(cb); } - store.delete = function(key, version, cb) { + store.delete = function(kind, key, version, cb) { + var items = this.allData[kind.namespace]; + if (!items) { + items = {}; + this.allData[kind] = items; + } var deletedItem = { version: version, deleted: true }; - if (this.flags.hasOwnProperty(key)) { - var old = this.flags[key]; - if (old && old.version < version) { - this.flags[key] = deletedItem; + if (Object.hasOwnProperty.call(items, key)) { + var old = items[key]; + if (!old || old.version < version) { + items[key] = deletedItem; } } else { - this.flags[key] = deletedItem; + items[key] = deletedItem; } callbackResult(cb); } - store.upsert = function(key, flag, cb) { - var old = this.flags[key]; + store.upsert = function(kind, item, cb) { + var key = item.key; + var items = this.allData[kind.namespace]; + if (!items) { + items = {}; + this.allData[kind] = items; + } - if (this.flags.hasOwnProperty(key)) { - var old = this.flags[key]; - if (old && old.version < flag.version) { - this.flags[key] = flag; + if (Object.hasOwnProperty.call(items, key)) { + var old = items[key]; + if (old && old.version < item.version) { + items[key] = item; } } else { - this.flags[key] = flag; + items[key] = item; } callbackResult(cb); @@ -91,4 +105,4 @@ function clone(obj) { return JSON.parse(JSON.stringify(obj)); } -module.exports = InMemoryFeatureStore; \ No newline at end of file +module.exports = InMemoryFeatureStore; diff --git a/feature_store_event_wrapper.js b/feature_store_event_wrapper.js index d33f88f..087a021 100644 --- a/feature_store_event_wrapper.js +++ b/feature_store_event_wrapper.js @@ -1,3 +1,5 @@ +var dataKind = require('./versioned_data_kind'); + function FeatureStoreEventWrapper(featureStore, emitter) { function differ(key, oldValue, newValue) { if(newValue && oldValue && newValue.version < oldValue.version) return; @@ -13,10 +15,11 
@@ function FeatureStoreEventWrapper(featureStore, emitter) { initialized: featureStore.initialized.bind(featureStore), close: featureStore.close.bind(featureStore), - init: function(newFlags, callback) { - featureStore.all(function(oldFlags){ - featureStore.init(newFlags, function(){ + init: function(newData, callback) { + featureStore.all(dataKind, function(oldFlags){ + featureStore.init(newData, function(){ var allFlags = {}; + var newFlags = newData[dataKind.features] || {}; Object.assign(allFlags, oldFlags, newFlags); var handledFlags = {}; @@ -31,19 +34,23 @@ function FeatureStoreEventWrapper(featureStore, emitter) { }); }, - delete: function(key, version, callback) { - featureStore.get(key, function(oldFlag) { - featureStore.delete(key, version, function() { - differ(key, oldFlag, {}); + delete: function(kind, key, version, callback) { + featureStore.get(kind, key, function(oldFlag) { + featureStore.delete(kind, key, version, function() { + if (kind === dataKind.features) { + differ(key, oldFlag, {}); + } callback && callback.apply(null, arguments); }); }); }, - upsert: function(key, newFlag, callback) { - featureStore.get(key, function(oldFlag) { - featureStore.upsert(key, newFlag, function() { - differ(key, oldFlag, newFlag); + upsert: function(kind, newItem, callback) { + featureStore.get(kind, newItem.key, function(oldItem) { + featureStore.upsert(kind, newItem, function() { + if (kind === dataKind.features) { + differ(oldItem ? 
oldItem.key : null, oldItem, newItem); + } callback && callback.apply(null, arguments); }); }); diff --git a/index.js b/index.js index 83b4510..a470053 100644 --- a/index.js +++ b/index.js @@ -15,6 +15,7 @@ var async = require('async'); var errors = require('./errors'); var package_json = require('./package.json'); var wrapPromiseCallback = require('./utils/wrapPromiseCallback'); +var dataKind = require('./versioned_data_kind'); function createErrorReporter(emitter, logger) { return function(error) { @@ -171,7 +172,7 @@ var new_client = function(sdk_key, config) { } function variationInternal(key, user, default_val, resolve, reject) { - config.feature_store.get(key, function(flag) { + config.feature_store.get(dataKind.features, key, function(flag) { evaluate.evaluate(flag, user, config.feature_store, function(err, result, events) { var i; var version = flag ? flag.version : null; @@ -215,7 +216,7 @@ var new_client = function(sdk_key, config) { return resolve({}); } - config.feature_store.all(function(flags) { + config.feature_store.all(dataKind.features, function(flags) { async.forEachOf(flags, function(flag, key, iteratee_cb) { // At the moment, we don't send any events here evaluate.evaluate(flag, user, config.feature_store, function(err, result, events) { diff --git a/package.json b/package.json index c293b9a..2ac4ca3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ldclient-node", - "version": "3.4.0", + "version": "4.0.0", "description": "LaunchDarkly SDK for Node.js", "main": "index.js", "scripts": { diff --git a/polling.js b/polling.js index a608c4b..b15ad8c 100644 --- a/polling.js +++ b/polling.js @@ -1,8 +1,10 @@ var errors = require('./errors'); +var dataKind = require('./versioned_data_kind'); function PollingProcessor(config, requestor) { var processor = {}, - store = config.feature_store, + featureStore = config.feature_store, + segmentStore = config.segment_store, stopped = false; function poll(cb) { @@ -16,7 +18,7 @@ function 
PollingProcessor(config, requestor) { start_time = new Date().getTime(); config.logger.debug("Polling LaunchDarkly for feature flag updates"); - requestor.request_all_flags(function(err, flags) { + requestor.request_all_data(function(err, resp) { elapsed = new Date().getTime() - start_time; sleepFor = Math.max(config.poll_interval * 1000 - elapsed, 0); config.logger.debug("Elapsed: %d ms, sleeping for %d ms", elapsed, sleepFor); @@ -29,7 +31,11 @@ function PollingProcessor(config, requestor) { setTimeout(function() { poll(cb); }, sleepFor); } } else { - store.init(JSON.parse(flags), function() { + var allData = JSON.parse(resp); + var initData = {}; + initData[dataKind.features.namespace] = allData.flags; + initData[dataKind.segments.namespace] = allData.segments; + featureStore.init(initData, function() { cb(); // Recursively call poll after the appropriate delay setTimeout(function() { poll(cb); }, sleepFor); diff --git a/redis_feature_store.js b/redis_feature_store.js index 2220a99..03fac3b 100644 --- a/redis_feature_store.js +++ b/redis_feature_store.js @@ -1,15 +1,17 @@ var redis = require('redis'), NodeCache = require( "node-cache" ), - winston = require('winston'); + winston = require('winston'), + dataKind = require('./versioned_data_kind'); var noop = function(){}; function RedisFeatureStore(redis_opts, cache_ttl, prefix, logger) { + var client = redis.createClient(redis_opts), store = {}, - features_key = prefix ? prefix + ":features" : "launchdarkly:features", + items_prefix = (prefix || "launchdarkly") + ":", cache = cache_ttl ? 
new NodeCache({ stdTTL: cache_ttl}) : null, inited = false, checked_init = false; @@ -27,55 +29,64 @@ function RedisFeatureStore(redis_opts, cache_ttl, prefix, logger) { // socket is active client.unref(); + function items_key(kind) { + return items_prefix + kind.namespace; + } + + function cache_key(kind, key) { + return kind.namespace + ":" + key; + } + // A helper that performs a get with the redis client - function do_get(key, cb) { - var flag; + function do_get(kind, key, cb) { + var item; cb = cb || noop; if (cache_ttl) { - flag = cache.get(key); - if (flag) { - cb(flag); + item = cache.get(cache_key(kind, key)); + if (item) { + cb(item); return; } } - client.hget(features_key, key, function(err, obj) { + client.hget(items_key(kind), key, function(err, obj) { if (err) { - logger.error("Error fetching flag from redis", err); + logger.error("Error fetching key " + key + " from redis in '" + kind.namespace + "'", err); cb(null); } else { - flag = JSON.parse(obj); - cb( (!flag || flag.deleted) ? 
null : flag); + item = JSON.parse(obj); + cb(item); } }); } - store.get = function(key, cb) { - do_get(key, function(flag) { - if (flag && !flag.deleted) { - cb(flag); + store.get = function(kind, key, cb) { + cb = cb || noop; + do_get(kind, key, function(item) { + if (item && !item.deleted) { + cb(item); } else { cb(null); } }); }; - store.all = function(cb) { + store.all = function(kind, cb) { cb = cb || noop; - client.hgetall(features_key, function(err, obj) { + client.hgetall(items_key(kind), function(err, obj) { if (err) { - logger.error("Error fetching flag from redis", err); + logger.error("Error fetching '" + kind.namespace + "'' from redis", err); cb(null); } else { var results = {}, - flags = obj; + items = obj; - for (var key in flags) { - if (Object.hasOwnProperty.call(flags,key)) { - var flag = JSON.parse(flags[key]); - if (!flag.deleted) { - results[key] = flag; + for (var key in items) { + if (Object.hasOwnProperty.call(items,key)) { + var item = JSON.parse(items[key]); + if (!item.deleted) { + results[key] = item; } } } @@ -84,31 +95,36 @@ function RedisFeatureStore(redis_opts, cache_ttl, prefix, logger) { }); }; - store.init = function(flags, cb) { - var stringified = {}; + store.init = function(allData, cb) { var multi = client.multi(); cb = cb || noop; - multi.del(features_key); if (cache_ttl) { cache.flushAll(); } - - for (var key in flags) { - if (Object.hasOwnProperty.call(flags,key)) { - stringified[key] = JSON.stringify(flags[key]); - } - if (cache_ttl) { - cache.set(key, flags[key]); + for (var kindNamespace in allData) { + if (Object.hasOwnProperty.call(allData, kindNamespace)) { + var kind = dataKind[kindNamespace]; + var baseKey = items_key(kind); + var items = allData[kindNamespace]; + var stringified = {}; + multi.del(baseKey); + for (var key in items) { + if (Object.hasOwnProperty.call(items, key)) { + stringified[key] = JSON.stringify(items[key]); + } + if (cache_ttl) { + cache.set(cache_key(kind, key), items[key]); + } + } + 
multi.hmset(baseKey, stringified); } } - multi.hmset(features_key, stringified); - multi.exec(function(err, replies) { if (err) { - logger.error("Error initializing redis feature store", err); + logger.error("Error initializing redis store", err); } else { inited = true; } @@ -116,25 +132,25 @@ function RedisFeatureStore(redis_opts, cache_ttl, prefix, logger) { }); }; - store.delete = function(key, version, cb) { + store.delete = function(kind, key, version, cb) { var multi; + var baseKey = items_key(kind); cb = cb || noop; - client.watch(features_key); + client.watch(baseKey); multi = client.multi(); - do_get(key, function(flag) { - if (flag && flag.version >= version) { + do_get(kind, key, function(item) { + if (item && item.version >= version) { multi.discard(); cb(); - return; } else { - var deletedItem = { version: version, deleted: true }; - multi.hset(features_key, key, JSON.stringify(deletedItem)); + deletedItem = { version: version, deleted: true }; + multi.hset(baseKey, key, JSON.stringify(deletedItem)); multi.exec(function(err, replies) { if (err) { - logger.error("Error deleting feature flag", err); + logger.error("Error deleting key " + key + " in '" + kind.namespace + "'", err); } else if (cache_ttl) { - cache.set(key, deletedItem); + cache.set(cache_key(kind, key), deletedItem); } cb(); }); @@ -142,25 +158,27 @@ function RedisFeatureStore(redis_opts, cache_ttl, prefix, logger) { }); }; - store.upsert = function(key, flag, cb) { + store.upsert = function(kind, item, cb) { var multi; + var baseKey = items_key(kind); + var key = item.key; cb = cb || noop; - client.watch(features_key); + client.watch(baseKey); multi = client.multi(); - do_get(key, function(original) { - if (original && original.version >= flag.version) { + do_get(kind, key, function(original) { + if (original && original.version >= item.version) { cb(); return; } - multi.hset(features_key, key, JSON.stringify(flag)); + multi.hset(baseKey, key, JSON.stringify(item)); 
multi.exec(function(err, replies) { if (err) { - logger.error("Error upserting feature flag", err); + logger.error("Error upserting key " + key + " in '" + kind.namespace + "'", err); } else { if (cache_ttl) { - cache.set(key, flag); + cache.set(cache_key(kind, key), item); } } cb(); @@ -181,7 +199,8 @@ function RedisFeatureStore(redis_opts, cache_ttl, prefix, logger) { cb(false); } else { - client.exists(features_key, function(err, obj) { + var inited = false; + client.exists(items_key(dataKind.features), function(err, obj) { if (!err && obj) { inited = true; } diff --git a/requestor.js b/requestor.js index ef46235..107d065 100644 --- a/requestor.js +++ b/requestor.js @@ -1,6 +1,6 @@ var ETagRequest = require('request-etag'); /** - * Creates a new Requestor object, which handles remote requests to fetch feature flags for LaunchDarkly. + * Creates a new Requestor object, which handles remote requests to fetch feature flags or segments for LaunchDarkly. * This is never called synchronously when requesting a feature flag for a user (e.g. via the toggle) call. * * It will be called once per second in polling mode (i.e. 
when streaming is disabled), or for extremely large @@ -65,16 +65,16 @@ function Requestor(sdk_key, config) { } } - requestor.request_flag = function(key, cb) { - var req = make_request('/sdk/latest-flags/' + key); + requestor.request_object = function(kind, key, cb) { + var req = make_request(kind.requestPath + key); req( process_response(cb), process_error_response(cb) ); - } + } - requestor.request_all_flags = function(cb) { - var req = make_request('/sdk/latest-flags'); + requestor.request_all_data = function(cb) { + var req = make_request('/sdk/latest-all'); req( process_response(cb), process_error_response(cb) diff --git a/streaming.js b/streaming.js index e2eb502..979c9f6 100644 --- a/streaming.js +++ b/streaming.js @@ -1,15 +1,20 @@ var errors = require('./errors'); var EventSource = require('./eventsource'); +var dataKind = require('./versioned_data_kind'); function StreamProcessor(sdk_key, config, requestor) { var processor = {}, - store = config.feature_store, + featureStore = config.feature_store, es; + function getKeyFromPath(kind, path) { + return path.startsWith(kind.streamApiPath) ? 
path.substring(kind.streamApiPath.length) : null; + } + processor.start = function(fn) { var cb = fn || function(){}; - es = new EventSource(config.stream_uri + "/flags", + es = new EventSource(config.stream_uri + "/all", { agent: config.proxy_agent, headers: {'Authorization': sdk_key,'User-Agent': config.user_agent} @@ -22,10 +27,13 @@ function StreamProcessor(sdk_key, config, requestor) { es.addEventListener('put', function(e) { config.logger.debug('Received put event'); if (e && e.data) { - var flags = JSON.parse(e.data); - store.init(flags, function() { + var all = JSON.parse(e.data); + var initData = {}; + initData[dataKind.features.namespace] = all.data.flags; + initData[dataKind.segments.namespace] = all.data.segments; + featureStore.init(initData, function() { cb(); - }) + }); } else { cb(new errors.LDStreamingError('Unexpected payload from event stream')); } @@ -35,7 +43,15 @@ function StreamProcessor(sdk_key, config, requestor) { config.logger.debug('Received patch event'); if (e && e.data) { var patch = JSON.parse(e.data); - store.upsert(patch.data.key, patch.data); + for (var k in dataKind) { + var kind = dataKind[k]; + var key = getKeyFromPath(kind, patch.path); + if (key != null) { + config.logger.debug('Updating ' + key + ' in ' + kind.namespace); + featureStore.upsert(kind, patch.data); + break; + } + } } else { cb(new errors.LDStreamingError('Unexpected payload from event stream')); } @@ -45,10 +61,16 @@ function StreamProcessor(sdk_key, config, requestor) { config.logger.debug('Received delete event'); if (e && e.data) { var data = JSON.parse(e.data), - key = data.path.charAt(0) === '/' ? 
data.path.substring(1) : data.path, // trim leading '/' version = data.version; - - store.delete(key, version); + for (var k in dataKind) { + var kind = dataKind[k]; + var key = getKeyFromPath(kind, data.path); + if (key != null) { + config.logger.debug('Deleting ' + key + ' in ' + kind.namespace); + featureStore.delete(kind, key, version); + break; + } + } } else { cb(new errors.LDStreamingError('Unexpected payload from event stream')); } @@ -56,13 +78,17 @@ function StreamProcessor(sdk_key, config, requestor) { es.addEventListener('indirect/put', function(e) { config.logger.debug('Received indirect put event') - requestor.request_all_flags(function (err, flags) { + requestor.request_all_flags(function (err, resp) { if (err) { cb(err); } else { - store.init(JSON.parse(flags), function() { + var all = JSON.parse(resp); + var initData = {}; + initData[dataKind.features.namespace] = all.flags; + initData[dataKind.segments.namespace] = all.segments; + featureStore.init(initData, function() { cb(); - }) + }); } }) }); @@ -70,14 +96,22 @@ function StreamProcessor(sdk_key, config, requestor) { es.addEventListener('indirect/patch', function(e) { config.logger.debug('Received indirect patch event') if (e && e.data) { - var key = e.data.charAt(0) === '/' ? 
e.data.substring(1) : e.data; - requestor.request_flag(key, function(err, flag) { - if (err) { - cb(new errors.LDStreamingError('Unexpected error requesting feature flag')); - } else { - store.upsert(key, JSON.parse(flag)); + var path = e.data; + for (var k in dataKind) { + var kind = dataKind[k]; + var key = getKeyFromPath(kind, path); + if (key != null) { + requestor.request_object(kind, key, function(err, resp) { + if (err) { + cb(new errors.LDStreamingError('Unexpected error requesting ' + key + ' in ' + kind.namespace)); + } else { + config.logger.debug('Updating ' + key + ' in ' + kind.namespace); + featureStore.upsert(kind, JSON.parse(resp)); + } + }); + break; } - }) + } } else { cb(new errors.LDStreamingError('Unexpected payload from event stream')); } diff --git a/test/evaluate_flag-test.js b/test/evaluate_flag-test.js index b770750..acd3d1d 100644 --- a/test/evaluate_flag-test.js +++ b/test/evaluate_flag-test.js @@ -1,4 +1,233 @@ -var evaluate = require('../evaluate_flag.js'); +var evaluate = require('../evaluate_flag'); +var InMemoryFeatureStore = require('../feature_store'); +var dataKind = require('../versioned_data_kind'); + +var featureStore = new InMemoryFeatureStore(); + +function defineSegment(segment, cb) { + var data = {}; + data[dataKind.segments.namespace] = {}; + data[dataKind.segments.namespace][segment.key] = segment; + featureStore.init(data); + setTimeout(cb, 0); +} + +function evalBooleanFlag(flag, user, cb) { + evaluate.evaluate(flag, user, featureStore, function(err, result) { + cb(result); + }); +} + +function makeFlagWithSegmentMatch(segment) { + return { + key: 'flagKey', + version: 1, + on: true, + prerequisites: [], + salt: "", + targets: [], + rules: [ + { + clauses: [ + { + attribute: "", + op: "segmentMatch", + values: [ segment.key ] + } + ], + variation: 1 + } + ], + fallthrough: { + variation: 0 + }, + variations: [ false, true ] + }; +} + +describe('evaluate', function() { + + it('matches segment with explicitly included 
user', function(done) { + var segment = { + key: 'test', + included: [ 'foo' ], + version: 1 + }; + defineSegment(segment, function() { + var flag = makeFlagWithSegmentMatch(segment); + var user = { key: 'foo' }; + evaluate.evaluate(flag, user, featureStore, function(err, result) { + expect(result).toBe(true); + done(); + }); + }); + }); + + it('does not match segment with explicitly excluded user', function(done) { + var segment = { + key: 'test', + excluded: [ 'foo' ], + version: 1 + }; + defineSegment(segment, function() { + var flag = makeFlagWithSegmentMatch(segment); + var user = { key: 'foo' }; + evaluate.evaluate(flag, user, featureStore, function(err, result) { + expect(result).toBe(false); + done(); + }); + }); + }); + + it('does not match segment with unknown user', function(done) { + var segment = { + key: 'test', + included: [ 'foo' ], + version: 1 + }; + defineSegment(segment, function() { + var flag = makeFlagWithSegmentMatch(segment); + var user = { key: 'bar' }; + evaluate.evaluate(flag, user, featureStore, function(err, result) { + expect(result).toBe(false); + done(); + }); + }); + }); + + it('matches segment with user who is both included and excluded', function(done) { + var segment = { + key: 'test', + included: [ 'foo' ], + excluded: [ 'foo' ], + version: 1 + }; + defineSegment(segment, function() { + var flag = makeFlagWithSegmentMatch(segment); + var user = { key: 'foo' }; + evaluate.evaluate(flag, user, featureStore, function(err, result) { + expect(result).toBe(true); + done(); + }); + }); + }); + + it('matches segment with rule with full rollout', function(done) { + var segment = { + key: 'test', + rules: [ + { + clauses: [ + { + attribute: 'email', + op: 'in', + values: [ 'test@example.com' ] + } + ], + weight: 100000 + } + ], + version: 1 + }; + defineSegment(segment, function() { + var flag = makeFlagWithSegmentMatch(segment); + var user = { key: 'foo', email: 'test@example.com' }; + evaluate.evaluate(flag, user, featureStore, 
function(err, result) { + expect(result).toBe(true); + done(); + }); + }); + }); + + it('does not match segment with rule with zero rollout', function(done) { + var segment = { + key: 'test', + rules: [ + { + clauses: [ + { + attribute: 'email', + op: 'in', + values: [ 'test@example.com' ] + } + ], + weight: 0 + } + ], + version: 1 + }; + defineSegment(segment, function() { + var flag = makeFlagWithSegmentMatch(segment); + var user = { key: 'foo', email: 'test@example.com' }; + evaluate.evaluate(flag, user, featureStore, function(err, result) { + expect(result).toBe(false); + done(); + }); + }); + }); + + it('matches segment with multiple matching clauses', function(done) { + var segment = { + key: 'test', + rules: [ + { + clauses: [ + { + attribute: 'email', + op: 'in', + values: [ 'test@example.com' ] + }, + { + attribute: 'name', + op: 'in', + values: [ 'bob' ] + } + ] + } + ], + version: 1 + }; + defineSegment(segment, function() { + var flag = makeFlagWithSegmentMatch(segment); + var user = { key: 'foo', email: 'test@example.com', name: 'bob' }; + evaluate.evaluate(flag, user, featureStore, function(err, result) { + expect(result).toBe(true); + done(); + }); + }); + }); + + it('does not match segment if one clause does not match', function(done) { + var segment = { + key: 'test', + rules: [ + { + clauses: [ + { + attribute: 'email', + op: 'in', + values: [ 'test@example.com' ] + }, + { + attribute: 'name', + op: 'in', + values: [ 'bill' ] + } + ] + } + ], + version: 1 + }; + defineSegment(segment, function() { + var flag = makeFlagWithSegmentMatch(segment); + var user = { key: 'foo', email: 'test@example.com', name: 'bob' }; + evaluate.evaluate(flag, user, featureStore, function(err, result) { + expect(result).toBe(false); + done(); + }); + }); + }); +}); describe('bucket_user', function() { it('gets expected bucket values for specific keys', function() { diff --git a/test/feature_store_test_base.js b/test/feature_store_test_base.js index 49e2021..981da51 
100644 --- a/test/feature_store_test_base.js +++ b/test/feature_store_test_base.js @@ -1,3 +1,4 @@ +var dataKind = require('../versioned_data_kind'); function allFeatureStoreTests(makeStore) { var feature1 = { @@ -11,7 +12,8 @@ function allFeatureStoreTests(makeStore) { function initedStore(cb) { var store = makeStore(); - var initData = { + var initData = {}; + initData[dataKind.features.namespace] = { 'foo': feature1, 'bar': feature2 }; @@ -31,7 +33,7 @@ function allFeatureStoreTests(makeStore) { it('gets existing feature', function(done) { initedStore(function(store) { - store.get(feature1.key, function(result) { + store.get(dataKind.features, feature1.key, function(result) { expect(result).toEqual(feature1); done(); }); @@ -40,7 +42,7 @@ function allFeatureStoreTests(makeStore) { it('does not get nonexisting feature', function(done) { initedStore(function(store) { - store.get('biz', function(result) { + store.get(dataKind.features, 'biz', function(result) { expect(result).toBe(null); done(); }); @@ -49,7 +51,7 @@ function allFeatureStoreTests(makeStore) { it('gets all features', function(done) { initedStore(function(store) { - store.all(function(result) { + store.all(dataKind.features, function(result) { expect(result).toEqual({ 'foo': feature1, 'bar': feature2 @@ -62,8 +64,8 @@ function allFeatureStoreTests(makeStore) { it('upserts with newer version', function(done) { var newVer = { key: feature1.key, version: feature1.version + 1 }; initedStore(function(store) { - store.upsert(feature1.key, newVer, function(result) { - store.get(feature1.key, function(result) { + store.upsert(dataKind.features, newVer, function(result) { + store.get(dataKind.features, feature1.key, function(result) { expect(result).toEqual(newVer); done(); }); @@ -74,8 +76,8 @@ function allFeatureStoreTests(makeStore) { it('does not upsert with older version', function(done) { var oldVer = { key: feature1.key, version: feature1.version - 1 }; initedStore(function(store) { - 
store.upsert(feature1.key, oldVer, function(result) { - store.get(feature1.key, function(result) { + store.upsert(dataKind.features, oldVer, function(result) { + store.get(dataKind.features, feature1.key, function(result) { expect(result).toEqual(feature1); done(); }); @@ -86,8 +88,8 @@ function allFeatureStoreTests(makeStore) { it('upserts new feature', function(done) { var newFeature = { key: 'biz', version: 99 }; initedStore(function(store) { - store.upsert(newFeature.key, newFeature, function(result) { - store.get(newFeature.key, function(result) { + store.upsert(dataKind.features, newFeature, function(result) { + store.get(dataKind.features, newFeature.key, function(result) { expect(result).toEqual(newFeature); done(); }); @@ -97,8 +99,8 @@ function allFeatureStoreTests(makeStore) { it('deletes with newer version', function(done) { initedStore(function(store) { - store.delete(feature1.key, feature1.version + 1, function(result) { - store.get(feature1.key, function(result) { + store.delete(dataKind.features, feature1.key, feature1.version + 1, function(result) { + store.get(dataKind.features, feature1.key, function(result) { expect(result).toBe(null); done(); }); @@ -108,8 +110,8 @@ function allFeatureStoreTests(makeStore) { it('does not delete with older version', function(done) { initedStore(function(store) { - store.delete(feature1.key, feature1.version - 1, function(result) { - store.get(feature1.key, function(result) { + store.delete(dataKind.features, feature1.key, feature1.version - 1, function(result) { + store.get(dataKind.features, feature1.key, function(result) { expect(result).not.toBe(null); done(); }); @@ -119,8 +121,8 @@ function allFeatureStoreTests(makeStore) { it('allows deleting unknown feature', function(done) { initedStore(function(store) { - store.delete('biz', 99, function(result) { - store.get('biz', function(result) { + store.delete(dataKind.features, 'biz', 99, function(result) { + store.get(dataKind.features, 'biz', function(result) { 
expect(result).toBe(null); done(); }); @@ -130,9 +132,9 @@ function allFeatureStoreTests(makeStore) { it('does not upsert older version after delete', function(done) { initedStore(function(store) { - store.delete(feature1.key, feature1.version + 1, function(result) { - store.upsert(feature1.key, feature1, function(result) { - store.get(feature1.key, function(result) { + store.delete(dataKind.features, feature1.key, feature1.version + 1, function(result) { + store.upsert(dataKind.features, feature1, function(result) { + store.get(dataKind.features, feature1.key, function(result) { expect(result).toBe(null); done(); }); diff --git a/versioned_data_kind.js b/versioned_data_kind.js new file mode 100644 index 0000000..21834ca --- /dev/null +++ b/versioned_data_kind.js @@ -0,0 +1,27 @@ + +/* + These objects denote the types of data that can be stored in the feature store and + referenced in the API. If we add another storable data type in the future, as long as it + follows the same pattern (having "key", "version", and "deleted" properties), we only need + to add a corresponding constant here and the existing store should be able to handle it. + + Note: for things to work correctly, the "namespace" property must match the key used in + module.exports. +*/ + +var features = { + namespace: 'features', + streamApiPath: '/flags/', + requestPath: '/sdk/latest-flags/' +}; + +var segments = { + namespace: 'segments', + streamApiPath: '/segments/', + requestPath: '/sdk/latest-segments/' +}; + +module.exports = { + features: features, + segments: segments +};