diff --git a/.circleci/config.yml b/.circleci/config.yml index 100dee9..feade2a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,36 +1,45 @@ -version: 2 +version: 2.1 workflows: - version: 2 - test: + workflow: jobs: - - oldest-long-term-support-release - - current-release - -node-template: &node-template - steps: - - checkout - - run: echo "Node version:" `node --version` - - run: npm install - - run: - command: npm test - environment: - JEST_JUNIT_OUTPUT: "reports/junit/js-test-results.xml" - - run: npm run check-typescript - - store_test_results: - path: reports/junit - - store_artifacts: - path: reports/junit + - build-and-test: + name: latest Node version + run-lint: true + - build-and-test: + name: oldest supported Node version + docker-image: circleci/node:6 + run-lint: false jobs: - oldest-long-term-support-release: - <<: *node-template - docker: - - image: circleci/node:6 - - image: redis - - current-release: - <<: *node-template + build-and-test: + parameters: + run-lint: + type: boolean + default: false + docker-image: + type: string + default: circleci/node:latest docker: - - image: circleci/node:latest + - image: << parameters.docker-image >> - image: redis + steps: + - checkout + - run: echo "Node version:" `node --version` + - run: npm install + - run: + command: npm test + environment: + JEST_JUNIT_OUTPUT: "reports/junit/js-test-results.xml" + - run: npm run check-typescript + - when: + condition: << parameters.run-lint >> + steps: + - run: npm run lint + - run: + name: dependency audit + command: ./scripts/better-audit.sh + - store_test_results: + path: reports/junit + - store_artifacts: + path: reports/junit diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000..53d8bff --- /dev/null +++ b/.eslintignore @@ -0,0 +1,5 @@ +node_modules/ +docs/ +test-types.js +test/ +test.js diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 0000000..a8b69ba --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,26 @@ +module.exports = { + "env": { + "node": true, + 
"es6": true, + "jest": true + }, + "extends": "eslint:recommended", + "rules": { + "indent": [ + "error", + 2 + ], + "linebreak-style": [ + "error", + "unix" + ], + "quotes": [ + "error", + "single" + ], + "semi": [ + "error", + "always" + ] + } +}; diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 24424dd..4f32d88 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -39,3 +39,13 @@ To verify that the TypeScript declarations compile correctly (this involves comp ``` npm run check-typescript ``` + +### Auditing package dependencies + +The `npm audit` tool compares all dependencies and transitive dependencies to a database of package versions with known vulnerabilities. However, the output of this tool includes both runtime and development dependencies. + +Runtime dependencies can affect applications using the SDK; they can only be fixed by updating one of the explicit dependencies in `package.json`. Development dependencies cannot affect applications, but will still cause `npm audit` to flag the project; they can be fixed by running `npm audit fix` to add overrides for transitive dependencies in `package-lock.json`. + +It is important _not_ to run `npm audit fix` if there are any bad _runtime_ dependencies, because it will hide the problem in our own build, without actually fixing the vulnerability when an application uses the SDK. + +The script `scripts/better-audit.sh`, which is run in the CI build and can also be run manually, processes the output of `npm audit` to eliminate all duplicate entries and then determines whether each entry is coming from a runtime dependency or a development dependency. If there are any runtime ones, it terminates with an error code so the build will fail. 
diff --git a/caching_store_wrapper.js b/caching_store_wrapper.js index 88541e1..d7eff5e 100644 --- a/caching_store_wrapper.js +++ b/caching_store_wrapper.js @@ -1,16 +1,16 @@ -var NodeCache = require('node-cache'), - dataKind = require('./versioned_data_kind'), - UpdateQueue = require('./update_queue'); +const NodeCache = require('node-cache'), + dataKind = require('./versioned_data_kind'), + UpdateQueue = require('./update_queue'); function cacheKey(kind, key) { - return kind.namespace + ":" + key; + return kind.namespace + ':' + key; } function allCacheKey(kind) { - return "$all:" + kind.namespace; + return '$all:' + kind.namespace; } -var initializedKey = "$checkedInit"; +const initializedKey = '$checkedInit'; /* CachingStoreWrapper provides commonly needed functionality for implementations of an @@ -61,18 +61,18 @@ var initializedKey = "$checkedInit"; all of the items provided. */ function CachingStoreWrapper(underlyingStore, ttl) { - var cache = ttl ? new NodeCache({ stdTTL: ttl }) : null; - var queue = new UpdateQueue(); - var initialized = false; + const cache = ttl ? new NodeCache({ stdTTL: ttl }) : null; + const queue = new UpdateQueue(); + let initialized = false; this.underlyingStore = underlyingStore; - this.init = function(allData, cb) { - queue.enqueue(function(cb) { + this.init = (allData, cb) => { + queue.enqueue(cb => { // The underlying store can either implement initInternal, which receives unordered data, // or initOrderedInternal, which receives ordered data (for implementations that cannot do // an atomic update and therefore need to be told what order to do the operations in). 
- var afterInit = function() { + const afterInit = () => { initialized = true; if (cache) { @@ -80,11 +80,11 @@ function CachingStoreWrapper(underlyingStore, ttl) { cache.flushAll(); // populate cache with initial data - Object.keys(allData).forEach(function(kindNamespace) { - var kind = dataKind[kindNamespace]; - var items = allData[kindNamespace]; + Object.keys(allData).forEach(kindNamespace => { + const kind = dataKind[kindNamespace]; + const items = allData[kindNamespace]; cache.set(allCacheKey(kind), items); - Object.keys(items).forEach(function(key) { + Object.keys(items).forEach(key => { cache.set(cacheKey(kind, key), items[key]); }); }); @@ -94,7 +94,7 @@ function CachingStoreWrapper(underlyingStore, ttl) { }; if (underlyingStore.initOrderedInternal) { - var orderedData = sortAllCollections(allData); + const orderedData = sortAllCollections(allData); underlyingStore.initOrderedInternal(orderedData, afterInit); } else { underlyingStore.initInternal(allData, afterInit); @@ -102,13 +102,13 @@ function CachingStoreWrapper(underlyingStore, ttl) { }, [], cb); }; - this.initialized = function(cb) { + this.initialized = cb => { if (initialized) { cb(true); } else if (cache && cache.get(initializedKey)) { cb(false); } else { - underlyingStore.initializedInternal(function(inited) { + underlyingStore.initializedInternal(inited => { initialized = inited; if (!initialized) { cache && cache.set(initializedKey, true); @@ -118,21 +118,21 @@ function CachingStoreWrapper(underlyingStore, ttl) { } }; - this.all = function(kind, cb) { - var items = cache && cache.get(allCacheKey(kind)); + this.all = (kind, cb) => { + const items = cache && cache.get(allCacheKey(kind)); if (items) { cb(items); return; } - underlyingStore.getAllInternal(kind, function(items) { + underlyingStore.getAllInternal(kind, items => { if (items === null || items === undefined) { cb(items); return; } - var filteredItems = {}; - Object.keys(items).forEach(function(key) { - var item = items[key]; + const 
filteredItems = {}; + Object.keys(items).forEach(key => { + const item = items[key]; if (item && !item.deleted) { filteredItems[key] = item; } @@ -142,16 +142,16 @@ function CachingStoreWrapper(underlyingStore, ttl) { }); }; - this.get = function(kind, key, cb) { + this.get = (kind, key, cb) => { if (cache) { - var item = cache.get(cacheKey(kind, key)); + const item = cache.get(cacheKey(kind, key)); if (item !== undefined) { cb(itemOnlyIfNotDeleted(item)); return; } } - underlyingStore.getInternal(kind, key, function(item) { + underlyingStore.getInternal(kind, key, item => { cache && cache.set(cacheKey(kind, key), item); cb(itemOnlyIfNotDeleted(item)); }); @@ -161,10 +161,10 @@ function CachingStoreWrapper(underlyingStore, ttl) { return (!item || item.deleted) ? null : item; } - this.upsert = function(kind, newItem, cb) { - queue.enqueue(function (cb) { + this.upsert = (kind, newItem, cb) => { + queue.enqueue(cb => { flushAllCaches(); - underlyingStore.upsertInternal(kind, newItem, function(err, updatedItem) { + underlyingStore.upsertInternal(kind, newItem, (err, updatedItem) => { if (!err) { cache && cache.set(cacheKey(kind, newItem.key), updatedItem); } @@ -173,11 +173,11 @@ function CachingStoreWrapper(underlyingStore, ttl) { }, [], cb); }; - this.delete = function(kind, key, version, cb) { + this.delete = (kind, key, version, cb) => { this.upsert(kind, { key: key, version: version, deleted: true }, cb); }; - this.close = function() { + this.close = () => { cache && cache.close(); underlyingStore.close(); }; @@ -186,7 +186,7 @@ function CachingStoreWrapper(underlyingStore, ttl) { if (!cache) { return; } - for (var kindNamespace in dataKind) { + for (let kindNamespace in dataKind) { cache.del(allCacheKey(dataKind[kindNamespace])); } } @@ -194,29 +194,25 @@ function CachingStoreWrapper(underlyingStore, ttl) { // This and the next function are used by init() to provide the best ordering of items // to write the underlying store, if the store supports the 
initOrderedInternal method. function sortAllCollections(dataMap) { - var result = []; + const result = []; Object.keys(dataMap).forEach(function(kindNamespace) { - var kind = dataKind[kindNamespace]; + const kind = dataKind[kindNamespace]; result.push({ kind: kind, items: sortCollection(kind, dataMap[kindNamespace]) }); }); - var kindPriority = function(kind) { - return kind.priority === undefined ? kind.namespace.length : kind.priority - }; - result.sort(function(i1, i2) { - return kindPriority(i1.kind) - kindPriority(i2.kind); - }); + const kindPriority = kind => kind.priority === undefined ? kind.namespace.length : kind.priority; + result.sort((i1, i2) => kindPriority(i1.kind) - kindPriority(i2.kind)); return result; } function sortCollection(kind, itemsMap) { - var itemsOut = []; - var remainingItems = new Set(Object.keys(itemsMap)); - var addWithDependenciesFirst = function(key) { + const itemsOut = []; + const remainingItems = new Set(Object.keys(itemsMap)); + const addWithDependenciesFirst = key => { if (remainingItems.has(key)) { remainingItems.delete(key); - var item = itemsMap[key]; + const item = itemsMap[key]; if (kind.getDependencyKeys) { - kind.getDependencyKeys(item).forEach(function(prereqKey) { + kind.getDependencyKeys(item).forEach(prereqKey => { addWithDependenciesFirst(prereqKey); }); } @@ -225,7 +221,7 @@ function CachingStoreWrapper(underlyingStore, ttl) { }; while (remainingItems.size > 0) { // pick a random item that hasn't been updated yet - var key = remainingItems.values().next().value; + const key = remainingItems.values().next().value; addWithDependenciesFirst(key); } return itemsOut; @@ -233,4 +229,3 @@ function CachingStoreWrapper(underlyingStore, ttl) { } module.exports = CachingStoreWrapper; - diff --git a/configuration.js b/configuration.js index 6ab6ad8..459fb2a 100644 --- a/configuration.js +++ b/configuration.js @@ -1,10 +1,10 @@ -var winston = require('winston'); -var InMemoryFeatureStore = require('./feature_store'); -var 
messages = require('./messages'); -var package_json = require('./package.json'); +const winston = require('winston'); +const InMemoryFeatureStore = require('./feature_store'); +const messages = require('./messages'); +const package_json = require('./package.json'); module.exports = (function() { - var defaults = function() { + const defaults = function() { return { baseUri: 'https://app.launchdarkly.com', streamUri: 'https://stream.launchdarkly.com', @@ -25,7 +25,7 @@ module.exports = (function() { }; }; - var deprecatedOptions = { + const deprecatedOptions = { base_uri: 'baseUri', stream_uri: 'streamUri', events_uri: 'eventsUri', @@ -44,7 +44,7 @@ module.exports = (function() { function checkDeprecatedOptions(config) { Object.keys(deprecatedOptions).forEach(function(oldName) { if (config[oldName] !== undefined) { - var newName = deprecatedOptions[oldName]; + const newName = deprecatedOptions[oldName]; config.logger.warn(messages.deprecated(oldName, newName)); if (config[newName] === undefined) { config[newName] = config[oldName]; @@ -57,7 +57,7 @@ module.exports = (function() { function applyDefaults(config, defaults) { // This works differently from Object.assign() in that it will *not* override a default value // if the provided value is explicitly set to null. 
- var ret = Object.assign({}, config); + const ret = Object.assign({}, config); Object.keys(defaults).forEach(function(name) { if (ret[name] === undefined || ret[name] === null) { ret[name] = defaults[name]; @@ -67,11 +67,11 @@ module.exports = (function() { } function canonicalizeUri(uri) { - return uri.replace(/\/+$/, ""); + return uri.replace(/\/+$/, ''); } function validate(options) { - var config = Object.assign({}, options || {}); + let config = Object.assign({}, options || {}); config.userAgent = 'NodeJSClient/' + package_json.version; config.logger = (config.logger || diff --git a/errors.js b/errors.js index 7112be3..6dfeda1 100644 --- a/errors.js +++ b/errors.js @@ -23,4 +23,4 @@ exports.isHttpErrorRecoverable = function(status) { return status === 400 || status === 408 || status === 429; } return true; -} +}; diff --git a/evaluate_flag.js b/evaluate_flag.js index 2a731cf..37ce427 100644 --- a/evaluate_flag.js +++ b/evaluate_flag.js @@ -1,16 +1,16 @@ -var operators = require('./operators'); -var dataKind = require('./versioned_data_kind'); -var util = require('util'); -var sha1 = require('node-sha1'); -var async = require('async'); -var stringifyAttrs = require('./utils/stringifyAttrs'); - -var builtins = ['key', 'ip', 'country', 'email', 'firstName', 'lastName', 'avatar', 'name', 'anonymous']; -var userAttrsToStringifyForEvaluation = [ "key", "secondary" ]; +const operators = require('./operators'); +const dataKind = require('./versioned_data_kind'); +const util = require('util'); +const sha1 = require('node-sha1'); +const stringifyAttrs = require('./utils/stringifyAttrs'); +const { safeAsyncEachSeries } = require('./utils/asyncUtils'); + +const builtins = ['key', 'ip', 'country', 'email', 'firstName', 'lastName', 'avatar', 'name', 'anonymous']; +const userAttrsToStringifyForEvaluation = [ 'key', 'secondary' ]; // Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events. 
// This is because it could affect evaluation results for existing users (ch35206). -var noop = function(){}; +const noop = () => {}; // Callback receives (err, detail, events) where detail has the properties "value", "variationIndex", and "reason"; // detail will never be null even if there's an error. @@ -26,9 +26,9 @@ function evaluate(flag, user, featureStore, eventFactory, cb) { return; } - var sanitizedUser = stringifyAttrs(user, userAttrsToStringifyForEvaluation); - var events = []; - evalInternal(flag, sanitizedUser, featureStore, events, eventFactory, function(err, detail) { + const sanitizedUser = stringifyAttrs(user, userAttrsToStringifyForEvaluation); + const events = []; + evalInternal(flag, sanitizedUser, featureStore, events, eventFactory, (err, detail) => { cb(err, detail, events); }); } @@ -36,13 +36,11 @@ function evaluate(flag, user, featureStore, eventFactory, cb) { function evalInternal(flag, user, featureStore, events, eventFactory, cb) { // If flag is off, return the off variation if (!flag.on) { - getOffResult(flag, { kind: 'OFF' }, function(err, detail) { - cb(err, detail); - }); + getOffResult(flag, { kind: 'OFF' }, cb); return; } - checkPrerequisites(flag, user, featureStore, events, eventFactory, function(err, failureReason) { + checkPrerequisites(flag, user, featureStore, events, eventFactory, (err, failureReason) => { if (err != null || failureReason != null) { getOffResult(flag, failureReason, cb); } else { @@ -53,17 +51,17 @@ function evalInternal(flag, user, featureStore, events, eventFactory, cb) { // Callback receives (err, reason) where reason is null if successful, or a "prerequisite failed" reason function checkPrerequisites(flag, user, featureStore, events, eventFactory, cb) { - if (flag.prerequisites) { - async.mapSeries(flag.prerequisites, - function(prereq, callback) { - featureStore.get(dataKind.features, prereq.key, function(prereqFlag) { + if (flag.prerequisites && flag.prerequisites.length) { + 
safeAsyncEachSeries(flag.prerequisites, + (prereq, callback) => { + featureStore.get(dataKind.features, prereq.key, prereqFlag => { // If the flag does not exist in the store or is not on, the prerequisite // is not satisfied if (!prereqFlag) { - callback({ key: prereq.key, err: new Error("Could not retrieve prerequisite feature flag \"" + prereq.key + "\"") }); + callback({ key: prereq.key, err: new Error('Could not retrieve prerequisite feature flag "' + prereq.key + '"') }); return; } - evalInternal(prereqFlag, user, featureStore, events, eventFactory, function(err, detail) { + evalInternal(prereqFlag, user, featureStore, events, eventFactory, (err, detail) => { // If there was an error, the value is null, the variation index is out of range, // or the value does not match the indexed variation the prerequisite is not satisfied events.push(eventFactory.newEvalEvent(prereqFlag, user, detail, null, flag)); @@ -80,7 +78,7 @@ function checkPrerequisites(flag, user, featureStore, events, eventFactory, cb) }); }); }, - function(errInfo) { + errInfo => { if (errInfo) { cb(errInfo.err, { 'kind': 'PREREQUISITE_FAILED', 'prerequisiteKey': errInfo.key }); } else { @@ -94,19 +92,15 @@ function checkPrerequisites(flag, user, featureStore, events, eventFactory, cb) // Callback receives (err, detail) function evalRules(flag, user, featureStore, cb) { - var i, j; - var target; - var variation; - var rule; // Check target matches - for (i = 0; i < (flag.targets || []).length; i++) { - target = flag.targets[i]; + for (let i = 0; i < (flag.targets || []).length; i++) { + const target = flag.targets[i]; if (!target.values) { continue; } - for (j = 0; j < target.values.length; j++) { + for (let j = 0; j < target.values.length; j++) { if (user.key === target.values[j]) { getVariation(flag, target.variation, { kind: 'TARGET_MATCH' }, cb); return; @@ -114,19 +108,21 @@ function evalRules(flag, user, featureStore, cb) { } } - async.mapSeries(flag.rules || [], - function(rule, callback) 
{ - ruleMatchUser(rule, user, featureStore, function(matched) { - setImmediate(callback, matched ? rule : null, null); + safeAsyncEachSeries(flag.rules, + (rule, callback) => { + ruleMatchUser(rule, user, featureStore, matched => { + // We raise an "error" on the first rule that *does* match, to stop evaluating more rules + callback(matched ? rule : null); }); }, - function(err, results) { + // The following function executes once all of the rules have been checked + err => { // we use the "error" value to indicate that a rule was successfully matched (since we only care - // about the first match, and mapSeries terminates on the first "error") + // about the first match, and eachSeries terminates on the first "error") if (err) { - var rule = err; - var reason = { kind: 'RULE_MATCH', ruleId: rule.id }; - for (var i = 0; i < flag.rules.length; i++) { + let rule = err; + const reason = { kind: 'RULE_MATCH', ruleId: rule.id }; + for (let i = 0; i < flag.rules.length; i++) { if (flag.rules[i].id === rule.id) { reason.ruleIndex = i; break; @@ -142,21 +138,20 @@ function evalRules(flag, user, featureStore, cb) { } function ruleMatchUser(r, user, featureStore, cb) { - var i; - if (!r.clauses) { - return false; + cb(false); + return; } - // A rule matches if all its clauses match - async.mapSeries(r.clauses, - function(clause, callback) { - clauseMatchUser(clause, user, featureStore, function(matched) { + // A rule matches if all its clauses match. + safeAsyncEachSeries(r.clauses, + (clause, callback) => { + clauseMatchUser(clause, user, featureStore, matched => { // on the first clause that does *not* match, we raise an "error" to stop the loop - setImmediate(callback, matched ? null : clause, null); + callback(matched ? 
null : clause); }); }, - function(err, results) { + err => { cb(!err); } ); @@ -164,18 +159,19 @@ function ruleMatchUser(r, user, featureStore, cb) { function clauseMatchUser(c, user, featureStore, cb) { if (c.op == 'segmentMatch') { - async.mapSeries(c.values, - function(value, callback) { - featureStore.get(dataKind.segments, value, function(segment) { + safeAsyncEachSeries(c.values, + (value, callback) => { + featureStore.get(dataKind.segments, value, segment => { if (segment && segmentMatchUser(segment, user)) { // on the first segment that matches, we raise an "error" to stop the loop - callback(segment, null); + callback(segment); } else { - callback(null, null); + callback(null); } }); }, - function(err, results) { + // The following function executes once all of the clauses have been checked + err => { // an "error" indicates that a segment *did* match cb(maybeNegate(c, !!err)); } @@ -186,21 +182,17 @@ function clauseMatchUser(c, user, featureStore, cb) { } function clauseMatchUserNoSegments(c, user) { - var uValue; - var matchFn; - var i; - - uValue = userValue(user, c.attribute); + const uValue = userValue(user, c.attribute); if (uValue === null || uValue === undefined) { return false; } - matchFn = operators.fn(c.op) + const matchFn = operators.fn(c.op); // The user's value is an array if (Array === uValue.constructor) { - for (i = 0; i < uValue.length; i++) { + for (let i = 0; i < uValue.length; i++) { if (matchAny(matchFn, uValue[i], c.values)) { return maybeNegate(c, true); } @@ -219,7 +211,7 @@ function segmentMatchUser(segment, user) { if ((segment.excluded || []).indexOf(user.key) >= 0) { return false; } - for (var i = 0; i < (segment.rules || []).length; i++) { + for (let i = 0; i < (segment.rules || []).length; i++) { if (segmentRuleMatchUser(segment.rules[i], user, segment.key, segment.salt)) { return true; } @@ -229,7 +221,7 @@ function segmentMatchUser(segment, user) { } function segmentRuleMatchUser(rule, user, segmentKey, salt) { - for (var 
i = 0; i < (rule.clauses || []).length; i++) { + for (let i = 0; i < (rule.clauses || []).length; i++) { if (!clauseMatchUserNoSegments(rule.clauses[i], user)) { return false; } @@ -241,8 +233,8 @@ function segmentRuleMatchUser(rule, user, segmentKey, salt) { } // All of the clauses are met. See if the user buckets in - var bucket = bucketUser(user, segmentKey, rule.bucketBy || "key", salt); - var weight = rule.weight / 100000.0; + const bucket = bucketUser(user, segmentKey, rule.bucketBy || 'key', salt); + const weight = rule.weight / 100000.0; return bucket < weight; } @@ -255,9 +247,7 @@ function maybeNegate(c, b) { } function matchAny(matchFn, value, values) { - var i = 0; - - for (i = 0; i < values.length; i++) { + for (let i = 0; i < values.length; i++) { if (matchFn(value, values[i])) { return true; } @@ -286,7 +276,7 @@ function getResultForVariationOrRollout(r, user, flag, reason, cb) { if (!r) { cb(new Error('Fallthrough variation undefined'), errorResult('MALFORMED_FLAG')); } else { - var index = variationForUser(r, user, flag); + const index = variationForUser(r, user, flag); if (index === null) { cb(new Error('Variation/rollout object with no variation or rollout'), errorResult('MALFORMED_FLAG')); } else { @@ -302,21 +292,17 @@ function errorResult(errorKind) { // Given a variation or rollout 'r', select // the variation for the given user function variationForUser(r, user, flag) { - var bucketBy; - var bucket; - var sum = 0; - var i; - var variation; if (r.variation != null) { // This represets a fixed variation; return it return r.variation; } else if (r.rollout != null) { // This represents a percentage rollout. Assume // we're rolling out by key - bucketBy = r.rollout.bucketBy != null ? r.rollout.bucketBy : "key"; - bucket = bucketUser(user, flag.key, bucketBy, flag.salt); - for (i = 0; i < r.rollout.variations.length; i++) { - var variate = r.rollout.variations[i]; + const bucketBy = r.rollout.bucketBy != null ? 
r.rollout.bucketBy : 'key'; + const bucket = bucketUser(user, flag.key, bucketBy, flag.salt); + let sum = 0; + for (let i = 0; i < r.rollout.variations.length; i++) { + const variate = r.rollout.variations[i]; sum += variate.weight / 100000.0; if (bucket < sum) { return variate.variation; @@ -330,10 +316,10 @@ function variationForUser(r, user, flag) { // Fetch an attribute value from a user object. Automatically // navigates into the custom array when necessary function userValue(user, attr) { - if (builtins.indexOf(attr) >= 0 && user.hasOwnProperty(attr)) { + if (builtins.indexOf(attr) >= 0 && Object.hasOwnProperty.call(user, attr)) { return user[attr]; } - if (user.custom && user.custom.hasOwnProperty(attr)) { + if (user.custom && Object.hasOwnProperty.call(user.custom, attr)) { return user.custom[attr]; } return null; @@ -341,21 +327,18 @@ function userValue(user, attr) { // Compute a percentile for a user function bucketUser(user, key, attr, salt) { - var uValue; - var idHash; - - idHash = bucketableStringValue(userValue(user, attr)); + let idHash = bucketableStringValue(userValue(user, attr)); if (idHash === null) { return 0; } if (user.secondary) { - idHash += "." + user.secondary; + idHash += '.' 
+ user.secondary; } - var hashKey = util.format("%s.%s.%s", key, salt, idHash); - var hashVal = parseInt(sha1(hashKey).substring(0,15), 16); + const hashKey = util.format('%s.%s.%s', key, salt, idHash); + const hashVal = parseInt(sha1(hashKey).substring(0,15), 16); return hashVal / 0xFFFFFFFFFFFFFFF; } diff --git a/event_factory.js b/event_factory.js index ad5cb39..78389a7 100644 --- a/event_factory.js +++ b/event_factory.js @@ -1,28 +1,28 @@ function EventFactory(withReasons) { - var ef = {}; + const ef = {}; function isExperiment(flag, reason) { if (reason) { switch (reason.kind) { - case 'RULE_MATCH': - var index = reason.ruleIndex; - if (index !== undefined) { - var rules = flag.rules || []; - return index >= 0 && index < rules.length && !!rules[index].trackEvents; - } - break; - case 'FALLTHROUGH': - return !!flag.trackEventsFallthrough; - break; + case 'RULE_MATCH': { + const index = reason.ruleIndex; + if (index !== undefined) { + const rules = flag.rules || []; + return index >= 0 && index < rules.length && !!rules[index].trackEvents; + } + break; + } + case 'FALLTHROUGH': + return !!flag.trackEventsFallthrough; } } return false; } - ef.newEvalEvent = function(flag, user, detail, defaultVal, prereqOfFlag) { - var addExperimentData = isExperiment(flag, detail.reason); - var e = { + ef.newEvalEvent = (flag, user, detail, defaultVal, prereqOfFlag) => { + const addExperimentData = isExperiment(flag, detail.reason); + const e = { kind: 'feature', creationDate: new Date().getTime(), key: flag.key, @@ -48,8 +48,8 @@ function EventFactory(withReasons) { return e; }; - ef.newDefaultEvent = function(flag, user, detail) { - var e = { + ef.newDefaultEvent = (flag, user, detail) => { + const e = { kind: 'feature', creationDate: new Date().getTime(), key: flag.key, @@ -71,8 +71,8 @@ function EventFactory(withReasons) { return e; }; - ef.newUnknownFlagEvent = function(key, user, detail) { - var e = { + ef.newUnknownFlagEvent = (key, user, detail) => { + const e = { kind: 
'feature', creationDate: new Date().getTime(), key: key, @@ -86,7 +86,7 @@ function EventFactory(withReasons) { return e; }; - ef.newIdentifyEvent = function(user) { + ef.newIdentifyEvent = user => { return { kind: 'identify', creationDate: new Date().getTime(), @@ -95,8 +95,8 @@ function EventFactory(withReasons) { }; }; - ef.newCustomEvent = function(eventName, user, data, metricValue) { - var e = { + ef.newCustomEvent = (eventName, user, data, metricValue) => { + const e = { kind: 'custom', creationDate: new Date().getTime(), key: eventName, diff --git a/event_processor.js b/event_processor.js index e53473b..89e8951 100644 --- a/event_processor.js +++ b/event_processor.js @@ -1,26 +1,27 @@ -var LRUCache = require('lrucache'); -var request = require('request'); -var EventSummarizer = require('./event_summarizer'); -var UserFilter = require('./user_filter'); -var errors = require('./errors'); -var messages = require('./messages'); -var stringifyAttrs = require('./utils/stringifyAttrs'); -var wrapPromiseCallback = require('./utils/wrapPromiseCallback'); +const LRUCache = require('lrucache'); +const request = require('request'); +const EventSummarizer = require('./event_summarizer'); +const UserFilter = require('./user_filter'); +const errors = require('./errors'); +const messages = require('./messages'); +const stringifyAttrs = require('./utils/stringifyAttrs'); +const wrapPromiseCallback = require('./utils/wrapPromiseCallback'); -var userAttrsToStringifyForEvents = [ "key", "secondary", "ip", "country", "email", "firstName", "lastName", "avatar", "name" ]; +const userAttrsToStringifyForEvents = [ 'key', 'secondary', 'ip', 'country', 'email', 'firstName', 'lastName', 'avatar', 'name' ]; function EventProcessor(sdkKey, config, errorReporter) { - var ep = {}; + const ep = {}; - var userFilter = UserFilter(config), - summarizer = EventSummarizer(config), - userKeysCache = LRUCache(config.userKeysCapacity), - queue = [], - lastKnownPastTime = 0, - exceededCapacity = 
false, - shutdown = false, - flushTimer, - flushUsersTimer; + const userFilter = UserFilter(config), + summarizer = EventSummarizer(config), + userKeysCache = LRUCache(config.userKeysCapacity); + + let queue = [], + lastKnownPastTime = 0, + exceededCapacity = false, + shutdown = false, + flushTimer, + flushUsersTimer; function enqueue(event) { if (queue.length < config.capacity) { @@ -29,7 +30,7 @@ function EventProcessor(sdkKey, config, errorReporter) { } else { if (!exceededCapacity) { exceededCapacity = true; - config.logger.warn("Exceeded event queue capacity. Increase capacity to avoid dropping events."); + config.logger.warn('Exceeded event queue capacity. Increase capacity to avoid dropping events.'); } } } @@ -46,63 +47,65 @@ function EventProcessor(sdkKey, config, errorReporter) { function makeOutputEvent(event) { switch (event.kind) { - case 'feature': - var debug = !!event.debug; - var out = { - kind: debug ? 'debug' : 'feature', - creationDate: event.creationDate, - key: event.key, - value: event.value, - default: event.default, - prereqOf: event.prereqOf - }; - if (event.variation !== undefined && event.variation !== null) { - out.variation = event.variation; - } - if (event.version) { - out.version = event.version; - } - if (event.reason) { - out.reason = event.reason; - } - if (config.inlineUsersInEvents || debug) { - out.user = processUser(event); - } else { - out.userKey = getUserKey(event); - } - return out; - case 'identify': - return { - kind: 'identify', - creationDate: event.creationDate, - key: getUserKey(event), - user: processUser(event) - }; - case 'custom': - var out = { - kind: 'custom', - creationDate: event.creationDate, - key: event.key - }; - if (config.inlineUsersInEvents) { - out.user = processUser(event); - } else { - out.userKey = getUserKey(event); - } - if (event.data !== null && event.data !== undefined) { - out.data = event.data; - } - if (event.metricValue !== null && event.metricValue !== undefined) { - out.metricValue = 
event.metricValue; - } - return out; - default: - return event; + case 'feature': { + const debug = !!event.debug; + const out = { + kind: debug ? 'debug' : 'feature', + creationDate: event.creationDate, + key: event.key, + value: event.value, + default: event.default, + prereqOf: event.prereqOf + }; + if (event.variation !== undefined && event.variation !== null) { + out.variation = event.variation; + } + if (event.version) { + out.version = event.version; + } + if (event.reason) { + out.reason = event.reason; + } + if (config.inlineUsersInEvents || debug) { + out.user = processUser(event); + } else { + out.userKey = getUserKey(event); + } + return out; + } + case 'identify': + return { + kind: 'identify', + creationDate: event.creationDate, + key: getUserKey(event), + user: processUser(event) + }; + case 'custom': { + const out = { + kind: 'custom', + creationDate: event.creationDate, + key: event.key + }; + if (config.inlineUsersInEvents) { + out.user = processUser(event); + } else { + out.userKey = getUserKey(event); + } + if (event.data !== null && event.data !== undefined) { + out.data = event.data; + } + if (event.metricValue !== null && event.metricValue !== undefined) { + out.metricValue = event.metricValue; + } + return out; + } + default: + return event; } } function processUser(event) { - var filtered = userFilter.filterUser(event.user); + const filtered = userFilter.filterUser(event.user); return stringifyAttrs(filtered, userAttrsToStringifyForEvents); } @@ -110,10 +113,10 @@ function EventProcessor(sdkKey, config, errorReporter) { return event.user && String(event.user.key); } - ep.sendEvent = function(event) { - var addIndexEvent = false, - addFullEvent = false, - addDebugEvent = false; + ep.sendEvent = event => { + let addIndexEvent = false, + addFullEvent = false, + addDebugEvent = false; if (shutdown) { return; @@ -153,18 +156,18 @@ function EventProcessor(sdkKey, config, errorReporter) { enqueue(makeOutputEvent(event)); } if (addDebugEvent) { - 
var debugEvent = Object.assign({}, event, { debug: true }); + const debugEvent = Object.assign({}, event, { debug: true }); enqueue(makeOutputEvent(debugEvent)); } - } + }; ep.flush = function(callback) { return wrapPromiseCallback(new Promise(function(resolve, reject) { - var worklist; - var summary; + let worklist; + let summary; if (shutdown) { - var err = new errors.LDInvalidSDKKeyError("Events cannot be posted because SDK key is invalid"); + const err = new errors.LDInvalidSDKKeyError('Events cannot be posted because SDK key is invalid'); reject(err); return; } @@ -183,25 +186,25 @@ function EventProcessor(sdkKey, config, errorReporter) { return; } - config.logger.debug("Flushing %d events", worklist.length); + config.logger.debug('Flushing %d events', worklist.length); tryPostingEvents(worklist, resolve, reject, true); - }.bind(this)), callback); - } + }), callback); + }; function tryPostingEvents(events, resolve, reject, canRetry) { - var retryOrReject = function(err) { + const retryOrReject = err => { if (canRetry) { - config.logger && config.logger.warn("Will retry posting events after 1 second"); - setTimeout(function() { + config.logger && config.logger.warn('Will retry posting events after 1 second'); + setTimeout(() => { tryPostingEvents(events, resolve, reject, false); }, 1000); } else { reject(err); } - } + }; - var options = Object.assign({}, config.tlsParams, { + const options = Object.assign({}, config.tlsParams, { method: 'POST', url: config.eventsUri + '/bulk', headers: { @@ -214,15 +217,15 @@ function EventProcessor(sdkKey, config, errorReporter) { timeout: config.timeout * 1000, agent: config.proxyAgent }); - request(options).on('response', function(resp, body) { + request(options).on('response', (resp, body) => { if (resp.headers['date']) { - var date = Date.parse(resp.headers['date']); + const date = Date.parse(resp.headers['date']); if (date) { lastKnownPastTime = date; } } if (resp.statusCode > 204) { - var err = new 
errors.LDUnexpectedResponseError(messages.httpErrorMessage(resp.statusCode, 'event posting', 'some events were dropped')); + const err = new errors.LDUnexpectedResponseError(messages.httpErrorMessage(resp.statusCode, 'event posting', 'some events were dropped')); errorReporter && errorReporter(err); if (!errors.isHttpErrorRecoverable(resp.statusCode)) { reject(err); @@ -238,17 +241,18 @@ function EventProcessor(sdkKey, config, errorReporter) { }); } - ep.close = function() { + ep.close = () => { clearInterval(flushTimer); clearInterval(flushUsersTimer); - } + }; + + flushTimer = setInterval(() => { + ep.flush().then(() => {} , () => {}); + }, config.flushInterval * 1000); - flushTimer = setInterval(function() { - ep.flush().then(function() { } , function() { }); - }, config.flushInterval * 1000); - flushUsersTimer = setInterval(function() { - userKeysCache.removeAll(); - }, config.userKeysFlushInterval * 1000); + flushUsersTimer = setInterval(() => { + userKeysCache.removeAll(); + }, config.userKeysFlushInterval * 1000); return ep; } diff --git a/event_summarizer.js b/event_summarizer.js index c1a7bc4..7ef783b 100644 --- a/event_summarizer.js +++ b/event_summarizer.js @@ -1,19 +1,19 @@ -function EventSummarizer(config) { - var es = {}; +function EventSummarizer() { + const es = {}; - var startDate = 0, - endDate = 0, - counters = {}; + let startDate = 0, + endDate = 0, + counters = {}; - es.summarizeEvent = function(event) { + es.summarizeEvent = event => { if (event.kind === 'feature') { - var counterKey = event.key + + const counterKey = event.key + ':' + ((event.variation !== null && event.variation !== undefined) ? event.variation : '') + ':' + ((event.version !== null && event.version !== undefined) ? 
event.version : ''); - var counterVal = counters[counterKey]; + const counterVal = counters[counterKey]; if (counterVal) { counterVal.count = counterVal.count + 1; } else { @@ -33,13 +33,13 @@ function EventSummarizer(config) { endDate = event.creationDate; } } - } + }; - es.getSummary = function() { - var flagsOut = {}; - for (var i in counters) { - var c = counters[i]; - var flag = flagsOut[c.key]; + es.getSummary = () => { + const flagsOut = {}; + for (let i in counters) { + const c = counters[i]; + let flag = flagsOut[c.key]; if (!flag) { flag = { default: c.default, @@ -47,7 +47,7 @@ function EventSummarizer(config) { }; flagsOut[c.key] = flag; } - var counterOut = { + const counterOut = { value: c.value, count: c.count }; @@ -66,13 +66,13 @@ function EventSummarizer(config) { endDate: endDate, features: flagsOut }; - } + }; - es.clearSummary = function() { + es.clearSummary = () => { startDate = 0; endDate = 0; counters = {}; - } + }; return es; } diff --git a/eventsource.js b/eventsource.js index 42c6b92..d0d55f5 100644 --- a/eventsource.js +++ b/eventsource.js @@ -311,7 +311,7 @@ function Event(type, optionalProperties) { Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }); if (optionalProperties) { for (var f in optionalProperties) { - if (optionalProperties.hasOwnProperty(f)) { + if (Object.hasOwnProperty.call(optionalProperties, f)) { Object.defineProperty(this, f, { writable: false, value: optionalProperties[f], enumerable: true }); } } @@ -327,7 +327,7 @@ function Event(type, optionalProperties) { function MessageEvent(type, eventInitDict) { Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }); for (var f in eventInitDict) { - if (eventInitDict.hasOwnProperty(f)) { + if (Object.hasOwnProperty.call(eventInitDict, f)) { Object.defineProperty(this, f, { writable: false, value: eventInitDict[f], enumerable: true }); } } diff --git a/feature_store.js b/feature_store.js index 
083928e..01a27be 100644 --- a/feature_store.js +++ b/feature_store.js @@ -1,4 +1,3 @@ -var dataKind = require('./versioned_data_kind'); // The default in-memory implementation of a feature store, which holds feature flags and // other related data received from LaunchDarkly. @@ -12,61 +11,69 @@ var dataKind = require('./versioned_data_kind'); // // Additional implementations should use CachingStoreWrapper if possible. -var noop = function(){}; +// Note that the contract for feature store methods does *not* require callbacks to be deferred +// with setImmediate, process.nextTick, etc. It is both allowed and desirable to call them +// directly whenever possible (i.e. if we don't actually have to do any I/O), since otherwise +// feature flag retrieval is a major performance bottleneck. These methods are for internal use +// by the SDK, and the SDK does not make any assumptions about whether a callback executes +// before or after the next statement. + function InMemoryFeatureStore() { - var store = {allData:{}}; + let allData = {}; + let initCalled = false; + + const store = {}; function callbackResult(cb, result) { - cb = cb || noop; - setTimeout(function() { cb(result); }, 0); // ensure this is dispatched asynchronously + cb && cb(result); } - store.get = function(kind, key, cb) { - var items = this.allData[kind.namespace] || {}; + store.get = (kind, key, cb) => { + const items = allData[kind.namespace] || {}; if (Object.hasOwnProperty.call(items, key)) { - var item = items[key]; + const item = items[key]; if (!item || item.deleted) { callbackResult(cb, null); } else { - callbackResult(cb, clone(item)); + callbackResult(cb, item); } } else { callbackResult(cb, null); } - } + }; - store.all = function(kind, cb) { - var results = {}; - var items = this.allData[kind.namespace] || {}; + store.all = (kind, cb) => { + const results = {}; + const items = allData[kind.namespace] || {}; - for (var key in items) { + for (let key in items) { if 
(Object.hasOwnProperty.call(items, key)) { - var item = items[key]; + const item = items[key]; if (item && !item.deleted) { - results[key] = clone(item); + results[key] = item; } } } callbackResult(cb, results); - } + }; - store.init = function(allData, cb) { - this.allData = allData; - this.initCalled = true; + store.init = (newData, cb) => { + allData = newData; + initCalled = true; callbackResult(cb); - } + }; - store.delete = function(kind, key, version, cb) { - var items = this.allData[kind.namespace]; + store.delete = (kind, key, version, cb) => { + let items = allData[kind.namespace]; if (!items) { items = {}; - this.allData[kind] = items; + allData[kind] = items; } - var deletedItem = { version: version, deleted: true }; + const deletedItem = { version: version, deleted: true }; if (Object.hasOwnProperty.call(items, key)) { - var old = items[key]; + const old = items[key]; if (!old || old.version < version) { items[key] = deletedItem; } @@ -75,35 +82,35 @@ function InMemoryFeatureStore() { } callbackResult(cb); - } + }; - store.upsert = function(kind, item, cb) { - var key = item.key; - var items = this.allData[kind.namespace]; + store.upsert = (kind, item, cb) => { + const key = item.key; + let items = allData[kind.namespace]; if (!items) { items = {}; - this.allData[kind.namespace] = items; + allData[kind.namespace] = items; } if (Object.hasOwnProperty.call(items, key)) { - var old = items[key]; + const old = items[key]; if (old && old.version < item.version) { - items[key] = item; + items[key] = clone(item); } } else { - items[key] = item; + items[key] = clone(item); } callbackResult(cb); - } + }; - store.initialized = function(cb) { - callbackResult(cb, this.initCalled === true); - } + store.initialized = cb => { + callbackResult(cb, initCalled === true); + }; - store.close = function() { + store.close = () => { // Close on the in-memory store is a no-op - } + }; return store; } diff --git a/feature_store_event_wrapper.js 
b/feature_store_event_wrapper.js index f081133..fdb931b 100644 --- a/feature_store_event_wrapper.js +++ b/feature_store_event_wrapper.js @@ -1,12 +1,150 @@ -var dataKind = require('./versioned_data_kind'); +const dataKind = require('./versioned_data_kind'); + +function NamespacedDataSet() { + let itemsByNamespace = {}; + + function get(namespace, key) { + const items = itemsByNamespace[namespace]; + return items && items[key]; + } + + function set(namespace, key, value) { + let items = itemsByNamespace[namespace]; + if (!items) { + items = {}; + itemsByNamespace[namespace] = items; + } + items[key] = value; + } + + function remove(namespace, key) { + const items = itemsByNamespace[namespace]; + if (items) { + delete items[key]; + } + } + + function removeAll() { + itemsByNamespace = {}; + } + + function enumerate(callback) { + for (let ns in itemsByNamespace) { + const items = itemsByNamespace[ns]; + const keys = Object.keys(items).sort(); // sort to make tests determinate + for (let i in keys) { + const key = keys[i]; + callback(ns, key, items[key]); + } + } + } + + function mergeFrom(otherSet) { + otherSet.enumerate(set); + } + + return { + get: get, + set: set, + remove: remove, + removeAll: removeAll, + enumerate: enumerate, + mergeFrom: mergeFrom, + toString: () => JSON.stringify(itemsByNamespace) + }; +} + +function DependencyTracker() { + const dependenciesFrom = NamespacedDataSet(); + const dependenciesTo = NamespacedDataSet(); + // dependenciesFrom: for a given flag/segment key, what are the flags/segments it relies on + // dependenciesTo: for a given flag/segment key, what are the flags/segments that rely on it + + function updateDependenciesFrom(namespace, key, newDependencySet) { + const oldDependencySet = dependenciesFrom.get(namespace, key); + oldDependencySet && oldDependencySet.enumerate((depNs, depKey) => { + const depsToThisDep = dependenciesTo.get(depNs, depKey); + depsToThisDep && depsToThisDep.remove(namespace, key); + }); + + 
dependenciesFrom.set(namespace, key, newDependencySet); + newDependencySet && newDependencySet.enumerate((depNs, depKey) => { + let depsToThisDep = dependenciesTo.get(depNs, depKey); + if (!depsToThisDep) { + depsToThisDep = NamespacedDataSet(); + dependenciesTo.set(depNs, depKey, depsToThisDep); + } + depsToThisDep.set(namespace, key, true); + }); + } + + function updateModifiedItems(inDependencySet, modifiedNamespace, modifiedKey) { + if (!inDependencySet.get(modifiedNamespace, modifiedKey)) { + inDependencySet.set(modifiedNamespace, modifiedKey, true); + const affectedItems = dependenciesTo.get(modifiedNamespace, modifiedKey); + affectedItems && affectedItems.enumerate((ns, key) => { + updateModifiedItems(inDependencySet, ns, key); + }); + } + } + + function reset() { + dependenciesFrom.removeAll(); + dependenciesTo.removeAll(); + } + + return { + updateDependenciesFrom: updateDependenciesFrom, + updateModifiedItems: updateModifiedItems, + reset: reset + }; +} function FeatureStoreEventWrapper(featureStore, emitter) { - function differ(key, oldValue, newValue) { - if(newValue && oldValue && newValue.version < oldValue.version) return; - setTimeout(function(){ - emitter.emit("update", newValue); - emitter.emit(`update:${key}`, oldValue, newValue); - }, 0); + const dependencyTracker = DependencyTracker(); + + function hasEventListeners() { + // Before we do something that could generate a change event, we'll check whether anyone is + // currently listening for such events. If they're not, then we can skip the whole "query the + // old data so we can compare it to the new data and see if there was a change" step, which + // could be expensive with a persistent feature store. 
+ return emitter.eventNames().some(name => name === 'update' || name.substring(0, 7) === 'update:'); // Node 6 may not have startsWith() + } + + function addIfModified(namespace, key, oldValue, newValue, toDataSet) { + if (newValue && oldValue && newValue.version <= oldValue.version) return; + dependencyTracker.updateModifiedItems(toDataSet, namespace, key); + } + + function sendChangeEvents(dataSet) { + dataSet.enumerate((namespace, key) => { + if (namespace === dataKind.features.namespace) { + const arg = { key: key }; + setImmediate(() => { emitter.emit('update', arg); }); + setImmediate(() => { emitter.emit(`update:${key}`, arg); }); + } + }); + } + + function computeDependencies(kind, item) { + const ret = NamespacedDataSet(); + if (kind === dataKind.features) { + for (let i in item.prerequisites || []) { + ret.set(dataKind.features.namespace, item.prerequisites[i].key, true); + } + for (let i in item.rules || []) { + const rule = item.rules[i]; + for (let j in rule.clauses || []) { + const clause = rule.clauses[j]; + if (clause.op === 'segmentMatch') { + for (let k in clause.values) { + ret.set(dataKind.segments.namespace, clause.values[k], true); + } + } + } + } + } + return ret; } return { @@ -15,47 +153,96 @@ function FeatureStoreEventWrapper(featureStore, emitter) { initialized: featureStore.initialized.bind(featureStore), close: featureStore.close.bind(featureStore), - init: function(newData, callback) { - featureStore.all(dataKind.features, function(oldFlags){ - featureStore.init(newData, function(){ - var allFlags = {}; - var newFlags = newData[dataKind.features.namespace] || {}; - Object.assign(allFlags, oldFlags, newFlags); - var handledFlags = {}; - - for (var key in allFlags) { - if(handledFlags[key]) continue; - differ(key, oldFlags[key], newFlags[key]); - handledFlags[key] = true; + init: (newData, callback) => { + const checkForChanges = hasEventListeners(); + const doInit = oldData => { + featureStore.init(newData, () => { + 
dependencyTracker.reset(); + + for (let namespace in newData) { + const items = newData[namespace]; + const kind = dataKind[namespace]; + for (let key in items) { + const item = items[key]; + dependencyTracker.updateDependenciesFrom(namespace, key, computeDependencies(kind, item)); + } + } + + if (checkForChanges) { + const updatedItems = NamespacedDataSet(); + for (let namespace in newData) { + const oldDataForKind = oldData[namespace]; + const newDataForKind = newData[namespace]; + const mergedData = Object.assign({}, oldDataForKind, newDataForKind); + for (let key in mergedData) { + addIfModified(namespace, key, + oldDataForKind && oldDataForKind[key], + newDataForKind && newDataForKind[key], + updatedItems); + } + } + sendChangeEvents(updatedItems); } - callback && callback.apply(null, arguments); + callback && callback(); + }); + }; + + if (checkForChanges) { + featureStore.all(dataKind.features, oldFlags => { + featureStore.all(dataKind.segments, oldSegments => { + const oldData = {}; + oldData[dataKind.features.namespace] = oldFlags; + oldData[dataKind.segments.namespace] = oldSegments; + doInit(oldData); + }); }); - }); + } else { + doInit(); + } }, - delete: function(kind, key, version, callback) { - featureStore.get(kind, key, function(oldFlag) { - featureStore.delete(kind, key, version, function() { - if (kind === dataKind.features) { - differ(key, oldFlag, {}); + delete: (kind, key, version, callback) => { + const checkForChanges = hasEventListeners(); + const doDelete = oldItem => { + featureStore.delete(kind, key, version, () => { + dependencyTracker.updateDependenciesFrom(kind.namespace, key, null); + if (checkForChanges) { + const updatedItems = NamespacedDataSet(); + addIfModified(kind.namespace, key, oldItem, { version: version, deleted: true }, updatedItems); + sendChangeEvents(updatedItems); } - callback && callback.apply(null, arguments); + callback && callback(); }); - }); + }; + if (checkForChanges) { + featureStore.get(kind, key, doDelete); 
+ } else { + doDelete(); + } }, - upsert: function(kind, newItem, callback) { - featureStore.get(kind, newItem.key, function(oldItem) { - featureStore.upsert(kind, newItem, function() { - if (kind === dataKind.features) { - differ(oldItem ? oldItem.key : null, oldItem, newItem); + upsert: (kind, newItem, callback) => { + const key = newItem.key; + const checkForChanges = hasEventListeners(); + const doUpsert = oldItem => { + featureStore.upsert(kind, newItem, () => { + dependencyTracker.updateDependenciesFrom(kind.namespace, key, computeDependencies(kind, newItem)); + if (checkForChanges) { + const updatedItems = NamespacedDataSet(); + addIfModified(kind.namespace, key, oldItem, newItem, updatedItems); + sendChangeEvents(updatedItems); } - callback && callback.apply(null, arguments); + callback && callback(); }); - }); + }; + if (checkForChanges) { + featureStore.get(kind, key, doUpsert); + } else { + doUpsert(); + } } - } + }; } -module.exports = FeatureStoreEventWrapper; \ No newline at end of file +module.exports = FeatureStoreEventWrapper; diff --git a/file_data_source.js b/file_data_source.js index 5493ecd..bf0de68 100644 --- a/file_data_source.js +++ b/file_data_source.js @@ -1,7 +1,7 @@ -var fs = require('fs'), - winston = require('winston'), - yaml = require('yaml'), - dataKind = require('./versioned_data_kind'); +const fs = require('fs'), + winston = require('winston'), + yaml = require('yaml'), + dataKind = require('./versioned_data_kind'); /* FileDataSource provides a way to use local files as a source of feature flag state, instead of @@ -10,16 +10,16 @@ var fs = require('fs'), See documentation in index.d.ts. 
*/ function FileDataSource(options) { - var paths = (options && options.paths) || []; - var autoUpdate = !!options.autoUpdate; + const paths = (options && options.paths) || []; + const autoUpdate = !!options.autoUpdate; return config => { - var featureStore = config.featureStore; - var watchers = []; - var timestamps = {}; - var pendingUpdate = false; - var logger = options.logger || config.logger || defaultLogger(); - var inited = false; + const logger = options.logger || config.logger || defaultLogger(); + const featureStore = config.featureStore; + const timestamps = {}; + let watchers = []; + let pendingUpdate = false; + let inited = false; function defaultLogger() { return new winston.Logger({ @@ -45,8 +45,8 @@ function FileDataSource(options) { fs.readFile(path, 'utf8', (err, data) => err ? reject(err) : resolve(data)) ).then(data => { - var parsed = parseData(data) || {}; - var addItem = (kind, item) => { + const parsed = parseData(data) || {}; + const addItem = (kind, item) => { if (!allData[kind.namespace]) { allData[kind.namespace] = {}; } @@ -55,7 +55,7 @@ function FileDataSource(options) { } else { allData[kind.namespace][item.key] = item; } - } + }; Object.keys(parsed.flags || {}).forEach(key => { addItem(dataKind.features, parsed.flags[key]); }); @@ -75,9 +75,9 @@ function FileDataSource(options) { function loadAllPromise() { pendingUpdate = false; - var allData = {}; - var p = Promise.resolve(); - for (var i = 0; i < paths.length; i++) { + const allData = {}; + let p = Promise.resolve(); + for (let i = 0; i < paths.length; i++) { (path => { p = p.then(() => loadFilePromise(path, allData)) .catch(e => { @@ -114,7 +114,7 @@ function FileDataSource(options) { if (pendingUpdate) { return; // coalesce updates so we don't do multiple reloads if a whole set of files was just updated } - var reload = () => { + const reload = () => { loadAllPromise().then(() => { logger.warn('Reloaded flags from file data'); }).catch(() => {}); @@ -137,7 +137,7 @@ function 
FileDataSource(options) { function startWatching() { paths.forEach(path => { - var watcher = fs.watch(path, { persistent: false }, (event, filename) => { + const watcher = fs.watch(path, { persistent: false }, () => { maybeReloadForPath(path); }); watchers.push(watcher); @@ -149,10 +149,10 @@ function FileDataSource(options) { watchers = []; } - var fds = {}; + const fds = {}; fds.start = fn => { - var cb = fn || (() => {}); + const cb = fn || (() => {}); if (autoUpdate) { startWatching(); @@ -176,7 +176,7 @@ function FileDataSource(options) { }; return fds; - } + }; } module.exports = FileDataSource; diff --git a/flags_state.js b/flags_state.js index ba8b620..01913df 100644 --- a/flags_state.js +++ b/flags_state.js @@ -1,12 +1,12 @@ function FlagsStateBuilder(valid) { - var builder = {}; - var flagValues = {}; - var flagMetadata = {}; + const builder = {}; + const flagValues = {}; + const flagMetadata = {}; - builder.addFlag = function(flag, value, variation, reason, detailsOnlyIfTracked) { + builder.addFlag = (flag, value, variation, reason, detailsOnlyIfTracked) => { flagValues[flag.key] = value; - var meta = {}; + const meta = {}; if (!detailsOnlyIfTracked || flag.trackEvents || flag.debugEventsUntilDate) { meta.version = flag.version; if (reason) { @@ -25,19 +25,15 @@ function FlagsStateBuilder(valid) { flagMetadata[flag.key] = meta; }; - builder.build = function() { + builder.build = () => { return { valid: valid, - allValues: function() { return flagValues; }, - getFlagValue: function(key) { return flagValues[key]; }, - getFlagReason: function(key) { - return flagMetadata[key] ? flagMetadata[key].reason : null; - }, - toJSON: function() { - return Object.assign({}, flagValues, { $flagsState: flagMetadata, $valid: valid }); - } + allValues: () => flagValues, + getFlagValue: key => flagValues[key], + getFlagReason: key => flagMetadata[key] ? 
flagMetadata[key].reason : null, + toJSON: () => Object.assign({}, flagValues, { $flagsState: flagMetadata, $valid: valid }) }; - } + }; return builder; } diff --git a/index.d.ts b/index.d.ts index 229760c..2b46325 100644 --- a/index.d.ts +++ b/index.d.ts @@ -508,6 +508,12 @@ declare module 'launchdarkly-node-server-sdk' { * from LaunchDarkly. By default, it uses an in-memory implementation; there are also adapters * for Redis and other databases (see the [SDK Reference Guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store)). * You will not need to use this interface unless you are writing your own implementation. + * + * Feature store methods can and should call their callbacks directly whenever possible, rather + * than deferring them with setImmediate() or process.nextTick(). This means that if for any + * reason you are updating or querying a feature store directly in your application code (which + * is not part of normal use of the SDK) you should be aware that the callback may be executed + * immediately. */ export interface LDFeatureStore { /** @@ -681,9 +687,8 @@ declare module 'launchdarkly-node-server-sdk' { * - `"error"`: Contains an error object describing some abnormal condition that the client has detected * (such as a network error). * - `"update"`: The client has received a change to a feature flag. The event parameter is an object - * containing the flag configuration; its `key` property is the flag key. Note that this does not - * necessarily mean the flag's value has changed for any particular user, only that some part of the - * flag configuration was changed. + * containing a single property, `key`, the flag key. Note that this does not necessarily mean the flag's + * value has changed for any particular user, only that some part of the flag configuration was changed. * - `"update:KEY"`: The client has received a change to the feature flag whose key is KEY. 
This is the * same as `"update"` but allows you to listen for a specific flag. * diff --git a/index.js b/index.js index 46f1b04..f303234 100644 --- a/index.js +++ b/index.js @@ -1,25 +1,25 @@ -var FeatureStoreEventWrapper = require('./feature_store_event_wrapper'); -var RedisFeatureStore = require('./redis_feature_store'); -var FileDataSource = require('./file_data_source'); -var Requestor = require('./requestor'); -var EventEmitter = require('events').EventEmitter; -var EventFactory = require('./event_factory'); -var EventProcessor = require('./event_processor'); -var PollingProcessor = require('./polling'); -var StreamingProcessor = require('./streaming'); -var FlagsStateBuilder = require('./flags_state'); -var configuration = require('./configuration'); -var evaluate = require('./evaluate_flag'); -var messages = require('./messages'); -var tunnel = require('tunnel'); -var crypto = require('crypto'); -var async = require('async'); -var errors = require('./errors'); -var wrapPromiseCallback = require('./utils/wrapPromiseCallback'); -var dataKind = require('./versioned_data_kind'); +const FeatureStoreEventWrapper = require('./feature_store_event_wrapper'); +const RedisFeatureStore = require('./redis_feature_store'); +const FileDataSource = require('./file_data_source'); +const Requestor = require('./requestor'); +const EventEmitter = require('events').EventEmitter; +const EventFactory = require('./event_factory'); +const EventProcessor = require('./event_processor'); +const PollingProcessor = require('./polling'); +const StreamingProcessor = require('./streaming'); +const FlagsStateBuilder = require('./flags_state'); +const configuration = require('./configuration'); +const evaluate = require('./evaluate_flag'); +const messages = require('./messages'); +const tunnel = require('tunnel'); +const crypto = require('crypto'); +const errors = require('./errors'); +const { safeAsyncEach } = require('./utils/asyncUtils'); +const wrapPromiseCallback = 
require('./utils/wrapPromiseCallback'); +const dataKind = require('./versioned_data_kind'); function createErrorReporter(emitter, logger) { - return function(error) { + return error => { if (!error) { return; } @@ -36,34 +36,32 @@ global.setImmediate = global.setImmediate || process.nextTick.bind(process); function NullEventProcessor() { return { - sendEvent: function() {}, - flush: function(callback) { - return wrapPromiseCallback(Promise.resolve(), callback); - }, - close: function() {} + sendEvent: () => {}, + flush: callback => wrapPromiseCallback(Promise.resolve(), callback), + close: () => {} }; } function NullUpdateProcessor() { return { - start: function(callback) { - setImmediate(callback, null); + start: callback => { + setImmediate(callback, null); // the start() callback should always be deferred }, - close: function() {} + close: () => {} }; } -var newClient = function(sdkKey, config) { - var client = new EventEmitter(), - initComplete = false, - failure, - requestor, - updateProcessor, - eventProcessor, - eventFactoryDefault, - eventFactoryWithReasons; - - config = configuration.validate(config); +const newClient = function(sdkKey, originalConfig) { + const client = new EventEmitter(); + let initComplete = false, + failure, + requestor, + updateProcessor, + eventProcessor, + eventFactoryDefault, + eventFactoryWithReasons; + + const config = configuration.validate(originalConfig); // Initialize global tunnel if proxy options are set if (config.proxyHost && config.proxyPort ) { @@ -72,7 +70,7 @@ var newClient = function(sdkKey, config) { config.featureStore = FeatureStoreEventWrapper(config.featureStore, client); - var maybeReportError = createErrorReporter(client, config.logger); + const maybeReportError = createErrorReporter(client, config.logger); eventFactoryDefault = EventFactory(false); eventFactoryWithReasons = EventFactory(true); @@ -88,26 +86,26 @@ var newClient = function(sdkKey, config) { } if (!sdkKey && !config.offline) { - throw new 
Error("You must configure the client with an SDK key"); + throw new Error('You must configure the client with an SDK key'); } - var createDefaultUpdateProcessor = function(config) { + const createDefaultUpdateProcessor = config => { if (config.useLdd || config.offline) { return NullUpdateProcessor(); } else { requestor = Requestor(sdkKey, config); if (config.stream) { - config.logger.info("Initializing stream processor to receive feature flag updates"); + config.logger.info('Initializing stream processor to receive feature flag updates'); return StreamingProcessor(sdkKey, config, requestor); } else { - config.logger.info("Initializing polling processor to receive feature flag updates"); - config.logger.warn("You should only disable the streaming API if instructed to do so by LaunchDarkly support"); + config.logger.info('Initializing polling processor to receive feature flag updates'); + config.logger.warn('You should only disable the streaming API if instructed to do so by LaunchDarkly support'); return PollingProcessor(config, requestor); } } - } - var updateProcessorFactory = createDefaultUpdateProcessor; + }; + let updateProcessorFactory = createDefaultUpdateProcessor; if (config.updateProcessor) { if (typeof config.updateProcessor === 'function') { updateProcessorFactory = config.updateProcessor; @@ -119,11 +117,11 @@ var newClient = function(sdkKey, config) { updateProcessor = updateProcessorFactory(config); } - updateProcessor.start(function(err) { + updateProcessor.start(err => { if (err) { - var error; + let error; if ((err.status && err.status === 401) || (err.code && err.code === 401)) { - error = new Error("Authentication failed. Double check your SDK key."); + error = new Error('Authentication failed. 
Double check your SDK key.'); } else { error = err; } @@ -137,23 +135,21 @@ var newClient = function(sdkKey, config) { } }); - client.initialized = function() { - return initComplete; - }; + client.initialized = () => initComplete; - client.waitUntilReady = function() { - config.logger.warn(messages.deprecated("waitUntilReady", "waitForInitialization")); + client.waitUntilReady = () => { + config.logger.warn(messages.deprecated('waitUntilReady', 'waitForInitialization')); if (initComplete) { return Promise.resolve(); } - return new Promise(function(resolve) { + return new Promise(resolve => { client.once('ready', resolve); }); }; - client.waitForInitialization = function() { + client.waitForInitialization = () => { if (initComplete) { return Promise.resolve(client); } @@ -161,42 +157,42 @@ var newClient = function(sdkKey, config) { return Promise.reject(failure); } - return new Promise(function(resolve, reject) { - client.once('ready', function() { resolve(client) }); + return new Promise((resolve, reject) => { + client.once('ready', () => { resolve(client); }); client.once('failed', reject); }); }; - client.variation = function(key, user, defaultVal, callback) { - return wrapPromiseCallback(new Promise(function(resolve, reject) { + client.variation = (key, user, defaultVal, callback) => { + return wrapPromiseCallback(new Promise((resolve, reject) => { evaluateIfPossible(key, user, defaultVal, eventFactoryDefault, - function(detail) { - resolve(detail.value) + detail => { + resolve(detail.value); }, reject); - }.bind(this)), callback); + }), callback); }; - client.variationDetail = function(key, user, defaultVal, callback) { - return wrapPromiseCallback(new Promise(function(resolve, reject) { + client.variationDetail = (key, user, defaultVal, callback) => { + return wrapPromiseCallback(new Promise((resolve, reject) => { evaluateIfPossible(key, user, defaultVal, eventFactoryWithReasons, resolve, reject); - }.bind(this)), callback); + }), callback); }; function 
errorResult(errorKind, defaultVal) { return { value: defaultVal, variationIndex: null, reason: { kind: 'ERROR', errorKind: errorKind } }; - }; + } function evaluateIfPossible(key, user, defaultVal, eventFactory, resolve, reject) { if (!initComplete) { - config.featureStore.initialized(function(storeInited) { + config.featureStore.initialized(storeInited => { if (storeInited) { - config.logger.warn("Variation called before LaunchDarkly client initialization completed (did you wait for the 'ready' event?) - using last known values from feature store") + config.logger.warn('Variation called before LaunchDarkly client initialization completed (did you wait for the \'ready\' event?) - using last known values from feature store'); variationInternal(key, user, defaultVal, eventFactory, resolve, reject); } else { - var err = new errors.LDClientError("Variation called before LaunchDarkly client initialization completed (did you wait for the 'ready' event?) - using default value"); + const err = new errors.LDClientError('Variation called before LaunchDarkly client initialization completed (did you wait for the \'ready\' event?) - using default value'); maybeReportError(err); - var result = errorResult('CLIENT_NOT_READY', defaultVal); + const result = errorResult('CLIENT_NOT_READY', defaultVal); eventProcessor.sendEvent(eventFactory.newUnknownFlagEvent(key, user, result)); return resolve(result); } @@ -207,40 +203,39 @@ var newClient = function(sdkKey, config) { } // resolves to a "detail" object with properties "value", "variationIndex", "reason" - function variationInternal(key, user, defaultVal, eventFactory, resolve, reject) { + function variationInternal(key, user, defaultVal, eventFactory, resolve) { if (client.isOffline()) { - config.logger.info("Variation called in offline mode. Returning default value."); + config.logger.info('Variation called in offline mode. 
Returning default value.'); return resolve(errorResult('CLIENT_NOT_READY', defaultVal)); } else if (!key) { - var err = new errors.LDClientError('No feature flag key specified. Returning default value.'); + const err = new errors.LDClientError('No feature flag key specified. Returning default value.'); maybeReportError(err); return resolve(errorResult('FLAG_NOT_FOUND', defaultVal)); } - if (user && user.key === "") { - config.logger.warn("User key is blank. Flag evaluation will proceed, but the user will not be stored in LaunchDarkly"); + if (user && user.key === '') { + config.logger.warn('User key is blank. Flag evaluation will proceed, but the user will not be stored in LaunchDarkly'); } - config.featureStore.get(dataKind.features, key, function(flag) { - + config.featureStore.get(dataKind.features, key, flag => { if (!flag) { maybeReportError(new errors.LDClientError('Unknown feature flag "' + key + '"; returning default value')); - var result = errorResult('FLAG_NOT_FOUND', defaultVal); + const result = errorResult('FLAG_NOT_FOUND', defaultVal); eventProcessor.sendEvent(eventFactory.newUnknownFlagEvent(key, user, result)); return resolve(result); } if (!user) { - var variationErr = new errors.LDClientError('No user specified. Returning default value.'); + const variationErr = new errors.LDClientError('No user specified. Returning default value.'); maybeReportError(variationErr); - var result = errorResult('USER_NOT_SPECIFIED', defaultVal); + const result = errorResult('USER_NOT_SPECIFIED', defaultVal); eventProcessor.sendEvent(eventFactory.newDefaultEvent(flag, user, result)); return resolve(result); } - evaluate.evaluate(flag, user, config.featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, config.featureStore, eventFactory, (err, detail, events) => { if (err) { maybeReportError(new errors.LDClientError('Encountered error evaluating feature flag:' + (err.message ? 
(': ' + err.message) : err))); } @@ -248,13 +243,13 @@ var newClient = function(sdkKey, config) { // Send off any events associated with evaluating prerequisites. The events // have already been constructed, so we just have to push them onto the queue. if (events) { - for (var i = 0; i < events.length; i++) { + for (let i = 0; i < events.length; i++) { eventProcessor.sendEvent(events[i]); } } if (detail.variationIndex === null) { - config.logger.debug("Result value is null in variation"); + config.logger.debug('Result value is null in variation'); detail.value = defaultVal; } eventProcessor.sendEvent(eventFactory.newEvalEvent(flag, user, detail, defaultVal)); @@ -263,82 +258,73 @@ var newClient = function(sdkKey, config) { }); } - client.toggle = function(key, user, defaultVal, callback) { - config.logger.warn("toggle() is deprecated. Call 'variation' instead"); - return client.variation(key, user, defaultVal, callback); - } - - client.allFlags = function(user, callback) { - config.logger.warn("allFlags() is deprecated. Call 'allFlagsState' instead and call toJSON() on the result"); + client.allFlags = (user, callback) => { + config.logger.warn('allFlags() is deprecated. Call \'allFlagsState\' instead and call toJSON() on the result'); return wrapPromiseCallback( - client.allFlagsState(user).then(function(state) { - return state.allValues(); - }), + client.allFlagsState(user).then(state => state.allValues()), callback); - } + }; - client.allFlagsState = function(user, options, callback) { + client.allFlagsState = (user, options, callback) => { if (callback === undefined && typeof(options) === 'function') { callback = options; options = {}; } else { options = options || {}; } - return wrapPromiseCallback(new Promise(function(resolve, reject) { - if (this.isOffline()) { - config.logger.info("allFlagsState() called in offline mode. 
Returning empty state."); + return wrapPromiseCallback(new Promise((resolve, reject) => { + if (client.isOffline()) { + config.logger.info('allFlagsState() called in offline mode. Returning empty state.'); return resolve(FlagsStateBuilder(false).build()); } if (!user) { - config.logger.info("allFlagsState() called without user. Returning empty state."); + config.logger.info('allFlagsState() called without user. Returning empty state.'); return resolve(FlagsStateBuilder(false).build()); } - var builder = FlagsStateBuilder(true); - var clientOnly = options.clientSideOnly; - var withReasons = options.withReasons; - var detailsOnlyIfTracked = options.detailsOnlyForTrackedFlags; - config.featureStore.all(dataKind.features, function(flags) { - async.forEachOf(flags, function(flag, key, iterateeCb) { + const builder = FlagsStateBuilder(true); + const clientOnly = options.clientSideOnly; + const withReasons = options.withReasons; + const detailsOnlyIfTracked = options.detailsOnlyForTrackedFlags; + config.featureStore.all(dataKind.features, flags => { + safeAsyncEach(flags, (flag, iterateeCb) => { if (clientOnly && !flag.clientSide) { - setImmediate(iterateeCb); + iterateeCb(); } else { // At the moment, we don't send any events here - evaluate.evaluate(flag, user, config.featureStore, eventFactoryDefault, function(err, detail, events) { + evaluate.evaluate(flag, user, config.featureStore, eventFactoryDefault, (err, detail) => { if (err != null) { maybeReportError(new Error('Error for feature flag "' + flag.key + '" while evaluating all flags: ' + err)); } builder.addFlag(flag, detail.value, detail.variationIndex, withReasons ? detail.reason : null, detailsOnlyIfTracked); - setImmediate(iterateeCb); + iterateeCb(); }); } - }, function(err) { + }, err => { return err ? 
reject(err) : resolve(builder.build()); }); }); - }.bind(this)), callback); - } + }), callback); + }; - client.secureModeHash = function(user) { - var hmac = crypto.createHmac('sha256', sdkKey); + client.secureModeHash = user => { + const hmac = crypto.createHmac('sha256', sdkKey); hmac.update(user.key); return hmac.digest('hex'); - } + }; - client.close = function() { + client.close = () => { eventProcessor.close(); if (updateProcessor) { updateProcessor.close(); } config.featureStore.close(); - } + }; - client.isOffline = function() { - return config.offline; - } + client.isOffline = () => config.offline; - client.track = function(eventName, user, data, metricValue) { + client.track = (eventName, user, data, metricValue) => { if (!userExistsAndHasKey(user)) { config.logger.warn(messages.missingUserKeyNoEvent()); return; @@ -346,7 +332,7 @@ var newClient = function(sdkKey, config) { eventProcessor.sendEvent(eventFactoryDefault.newCustomEvent(eventName, user, data, metricValue)); }; - client.identify = function(user) { + client.identify = user => { if (!userExistsAndHasKey(user)) { config.logger.warn(messages.missingUserKeyNoEvent()); return; @@ -354,28 +340,29 @@ var newClient = function(sdkKey, config) { eventProcessor.sendEvent(eventFactoryDefault.newIdentifyEvent(user)); }; - client.flush = function(callback) { + client.flush = callback => { return eventProcessor.flush(callback); }; function userExistsAndHasKey(user) { if (user) { - var key = user.key; - return key !== undefined && key !== null && key !== ""; + const key = user.key; + return key !== undefined && key !== null && key !== ''; } return false; } function deprecatedMethod(oldName, newName) { - client[oldName] = function() { + client[oldName] = (...args) => { config.logger.warn(messages.deprecated(oldName, newName)); - return client[newName].apply(client, arguments); + return client[newName].apply(client, args); }; } deprecatedMethod('all_flags', 'allFlags'); deprecatedMethod('is_offline', 
'isOffline'); deprecatedMethod('secure_mode_hash', 'secureModeHash'); + deprecatedMethod('toggle', 'variation'); return client; }; @@ -389,7 +376,7 @@ module.exports = { function createProxyAgent(config) { - var options = { + const options = { proxy: { host: config.proxyHost, port: config.proxyPort, @@ -399,7 +386,7 @@ function createProxyAgent(config) { if (config.proxyScheme === 'https') { if (!config.baseUri || config.baseUri.startsWith('https')) { - return tunnel.httpsOverHttps(options); + return tunnel.httpsOverHttps(options); } else { return tunnel.httpOverHttps(options); } diff --git a/messages.js b/messages.js index 81999ca..603cba9 100644 --- a/messages.js +++ b/messages.js @@ -1,16 +1,12 @@ -var errors = require('./errors'); +const errors = require('./errors'); -exports.deprecated = function(oldName, newName) { - return '"' + oldName + '" is deprecated, please use "' + newName + '"'; -}; +exports.deprecated = (oldName, newName) => + '"' + oldName + '" is deprecated, please use "' + newName + '"'; -exports.httpErrorMessage = function(status, context, retryMessage) { - return 'Received error ' + status +exports.httpErrorMessage = (status, context, retryMessage) => + 'Received error ' + status + (status == 401 ? ' (invalid SDK key)' : '') + ' for ' + context + ' - ' + (errors.isHttpErrorRecoverable(status) ? retryMessage : 'giving up permanently'); -}; -exports.missingUserKeyNoEvent = function() { - return 'User was unspecified or had no key; event will not be sent'; -}; +exports.missingUserKeyNoEvent = () => 'User was unspecified or had no key; event will not be sent'; diff --git a/operators.js b/operators.js index 6032ad2..c0a2e5b 100644 --- a/operators.js +++ b/operators.js @@ -1,26 +1,60 @@ +const semver = require('semver'); -var semver = require('semver'); +// Our reference SDK, Go, parses date/time strings with the time.RFC3339Nano format. This regex should match +// strings that are valid in that format, and no others. 
+// Acceptable: 2019-10-31T23:59:59Z, 2019-10-31T23:59:59.100Z, 2019-10-31T23:59:59-07, 2019-10-31T23:59:59-07:00, etc. +// Unacceptable: no "T", no time zone designation +const dateRegex = new RegExp('^\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d(\\.\\d\\d*)?(Z|[-+]\\d\\d(:\\d\\d)?)'); + +function stringOperator(f) { + return (userValue, clauseValue) => + typeof userValue === 'string' && typeof clauseValue === 'string' && f(userValue, clauseValue); +} + +function numericOperator(f) { + return (userValue, clauseValue) => + typeof userValue === 'number' && typeof clauseValue === 'number' && f(userValue, clauseValue); +} + +function dateOperator(f) { + return (userValue, clauseValue) => { + const userValueNum = parseDate(userValue); + const clauseValueNum = parseDate(clauseValue); + return userValueNum !== null && clauseValueNum !== null && f(userValueNum, clauseValueNum); + }; +} + +function parseDate(input) { + switch(typeof input) { + case 'number': + return input; + case 'string': + return dateRegex.test(input) ? Date.parse(input) : null; + default: + return null; + } +} function semVerOperator(fn) { - return function(a, b) { - var av = parseSemVer(a), bv = parseSemVer(b); + return (a, b) => { + const av = parseSemVer(a), bv = parseSemVer(b); return (av && bv) ? fn(av, bv) : false; }; } function parseSemVer(input) { - if (input.startsWith("v")) { + if (input.startsWith('v')) { // the semver library tolerates a leading "v", but the standard does not. 
return null; } - var ret = semver.parse(input); + let ret = semver.parse(input); if (!ret) { - var versionNumericComponents = new RegExp("^\\d+(\\.\\d+)?(\\.\\d+)?").exec(input); + const versionNumericComponents = new RegExp('^\\d+(\\.\\d+)?(\\.\\d+)?').exec(input); if (versionNumericComponents) { - var transformed = versionNumericComponents[0]; - for (var i = 1; i < versionNumericComponents.length; i++) { + let transformed = versionNumericComponents[0]; + for (let i = 1; i < versionNumericComponents.length; i++) { if (versionNumericComponents[i] == undefined) { - transformed = transformed + ".0"; + transformed = transformed + '.0'; } } transformed = transformed + input.substring(versionNumericComponents[0].length); @@ -30,68 +64,33 @@ function parseSemVer(input) { return ret; } -var operators = { - "in": function(a, b) { - return a === b; - }, - "endsWith": function(a, b) { - return typeof a === 'string' && a.endsWith(b); - }, - "startsWith": function(a, b) { - return typeof a === 'string' && a.startsWith(b); - }, - "matches": function(a, b) { - return typeof b === 'string' && new RegExp(b).test(a); - }, - "contains": function(a, b) { - return typeof a === 'string' && a.indexOf(b) > -1; - }, - "lessThan": function(a, b) { - return typeof a === 'number' && a < b; - }, - "lessThanOrEqual": function(a, b) { - return typeof a === 'number' && a <= b; - }, - "greaterThan": function(a, b) { - return typeof a === 'number' && a > b; - }, - "greaterThanOrEqual": function(a, b) { - return typeof a === 'number' && a >= b; - }, - "before": function(a, b) { - if (typeof a === 'string') { - a = Date.parse(a); - } - if (typeof b === 'string') { - b = Date.parse(b); - } - - if (typeof a === 'number' && typeof b === 'number') { - return a < b; - } +function safeRegexMatch(pattern, value) { + try { + return new RegExp(pattern).test(value); + } catch(e) { + // do not propagate this exception, just treat a bad regex as a non-match for consistency with other SDKs return false; - }, - 
"after": function(a, b) { - if (typeof a === 'string') { - a = Date.parse(a); - } - if (typeof b === 'string') { - b = Date.parse(b); - } + } +} - if (typeof a === 'number' && typeof b === 'number') { - return a > b; - } - return false; - }, - "semVerEqual": semVerOperator(function(a, b) { return a.compare(b) == 0; }), - "semVerLessThan": semVerOperator(function(a, b) { return a.compare(b) < 0; }), - "semVerGreaterThan": semVerOperator(function(a, b) { return a.compare(b) > 0; }) +const operators = { + in: (a, b) => a === b, + endsWith: stringOperator((a, b) => a.endsWith(b)), + startsWith: stringOperator((a, b) => a.startsWith(b)), + matches: stringOperator((a, b) => safeRegexMatch(b, a)), + contains: stringOperator((a, b) => a.indexOf(b) > -1), + lessThan: numericOperator((a, b) => a < b), + lessThanOrEqual: numericOperator((a, b) => a <= b), + greaterThan: numericOperator((a, b) => a > b), + greaterThanOrEqual: numericOperator((a, b) => a >= b), + before: dateOperator((a, b) => a < b), + after: dateOperator((a, b) => a > b), + semVerEqual: semVerOperator((a, b) => a.compare(b) == 0), + semVerLessThan: semVerOperator((a, b) => a.compare(b) < 0), + semVerGreaterThan: semVerOperator((a, b) => a.compare(b) > 0) }; -var operatorNone = function(a, b) { - return false; -} +const operatorNone = () => false; function fn(op) { return operators[op] || operatorNone; diff --git a/package-lock.json b/package-lock.json index 0614e57..9037371 100644 --- a/package-lock.json +++ b/package-lock.json @@ -49,6 +49,12 @@ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true } } }, @@ -878,6 +884,12 @@ "resolved": 
"https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.133.tgz", "integrity": "sha512-lyoC8aoqbbDqsprb6aPdt9n3DpOZZzdz/T4IZKsR0/dkZIxnJVUjjcpOSwA66jPRIOyDAamCTAUqweU05kKNSg==", "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true } } }, @@ -1214,41 +1226,6 @@ "resolved": "https://registry.npmjs.org/@types/events/-/events-1.2.0.tgz", "integrity": "sha1-gaZzHOTfQ2GeXIyUU4Oz5iqJ6oY=" }, - "@types/fs-extra": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-5.1.0.tgz", - "integrity": "sha512-AInn5+UBFIK9FK5xc9yP5e3TQSPNNgjHByqYcj9g5elVBnDQcQL7PlO1CIRy2gWlbwK7UPYqi7vRvFA44dCmYQ==", - "dev": true, - "requires": { - "@types/node": "*" - } - }, - "@types/glob": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.1.1.tgz", - "integrity": "sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w==", - "dev": true, - "requires": { - "@types/events": "*", - "@types/minimatch": "*", - "@types/node": "*" - } - }, - "@types/handlebars": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@types/handlebars/-/handlebars-4.1.0.tgz", - "integrity": "sha512-gq9YweFKNNB1uFK71eRqsd4niVkXrxHugqWFQkeLRJvGjnxsLr16bYtcsG4tOFwmYi0Bax+wCkbf1reUfdl4kA==", - "dev": true, - "requires": { - "handlebars": "*" - } - }, - "@types/highlight.js": { - "version": "9.12.3", - "resolved": "https://registry.npmjs.org/@types/highlight.js/-/highlight.js-9.12.3.tgz", - "integrity": "sha512-pGF/zvYOACZ/gLGWdQH8zSwteQS1epp68yRcVLJMgUck/MjEn/FBYmPub9pXT8C1e4a8YZfHo1CKyV8q1vKUnQ==", - "dev": true - }, "@types/istanbul-lib-coverage": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.1.tgz", @@ -1274,24 
+1251,6 @@ "@types/istanbul-lib-report": "*" } }, - "@types/lodash": { - "version": "4.14.144", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.144.tgz", - "integrity": "sha512-ogI4g9W5qIQQUhXAclq6zhqgqNUr7UlFaqDHbch7WLSLeeM/7d3CRaw7GLajxvyFvhJqw4Rpcz5bhoaYtIx6Tg==", - "dev": true - }, - "@types/marked": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/@types/marked/-/marked-0.4.2.tgz", - "integrity": "sha512-cDB930/7MbzaGF6U3IwSQp6XBru8xWajF5PV2YZZeV8DyiliTuld11afVztGI9+yJZ29il5E+NpGA6ooV/Cjkg==", - "dev": true - }, - "@types/minimatch": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.3.tgz", - "integrity": "sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==", - "dev": true - }, "@types/node": { "version": "10.5.2", "resolved": "https://registry.npmjs.org/@types/node/-/node-10.5.2.tgz", @@ -1306,16 +1265,6 @@ "@types/node": "*" } }, - "@types/shelljs": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/@types/shelljs/-/shelljs-0.8.5.tgz", - "integrity": "sha512-bZgjwIWu9gHCjirKJoOlLzGi5N0QgZ5t7EXEuoqyWCHTuSddURXo3FOBYDyRPNOWzZ6NbkLvZnVkn483Y/tvcQ==", - "dev": true, - "requires": { - "@types/glob": "*", - "@types/node": "*" - } - }, "@types/stack-utils": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-1.0.1.tgz", @@ -1358,6 +1307,12 @@ } } }, + "acorn-jsx": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.1.0.tgz", + "integrity": "sha512-tMUqwBWfLFbJbizRmEcWSLw6HnFzfdJs2sOJEOwwtVPMoH/0Ay+E703oZz78VSXZiiDcZrQ5XKjPIUQixhmgVw==", + "dev": true + }, "acorn-walk": { "version": "6.1.1", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-6.1.1.tgz", @@ -1406,6 +1361,15 @@ "normalize-path": "^2.1.1" } }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + 
"integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, "arr-diff": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", @@ -1468,12 +1432,9 @@ "dev": true }, "async": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.0.tgz", - "integrity": "sha512-xAfGg1/NTLBBKlHFmnd7PlmUW9KhVQIUuSrYem9xzFUZy13ScvtyGGejaae9iAVRiRq9+Cx7DPFaAAhCpyxyPw==", - "requires": { - "lodash": "^4.14.0" - } + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/async/-/async-3.1.0.tgz", + "integrity": "sha512-4vx/aaY6j/j3Lw3fbCHNWP0pPaTCew3F6F3hYyl/tHs/ndmV1q7NW9T5yuJ2XAGwdQrP+6Wu20x06U4APo/iQQ==" }, "async-limiter": { "version": "1.0.0", @@ -1515,14 +1476,6 @@ "babel-preset-jest": "^24.6.0", "chalk": "^2.4.2", "slash": "^2.0.0" - }, - "dependencies": { - "slash": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", - "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", - "dev": true - } } }, "babel-plugin-istanbul": { @@ -1769,6 +1722,12 @@ "supports-color": "^5.3.0" } }, + "chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true + }, "check-error": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", @@ -1804,6 +1763,21 @@ } } }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "cli-width": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", + "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", + "dev": true + }, "cliui": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", @@ -1833,9 +1807,9 @@ } }, "clone": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.2.tgz", - "integrity": "sha1-Jgt6meux7f4kdTgXX3gyQ8sZ0Uk=" + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", + "integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=" }, "co": { "version": "4.6.0", @@ -1888,9 +1862,9 @@ } }, "commander": { - "version": "2.17.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.17.1.tgz", - "integrity": "sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==", + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", "dev": true, "optional": true }, @@ -1992,6 +1966,14 @@ "semver": "^5.5.0", "shebang-command": "^1.2.0", "which": "^1.2.9" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } } }, "cssom": { @@ -2163,6 +2145,15 @@ "integrity": "sha512-xLqpez+Zj9GKSnPWS0WZw1igGocZ+uua8+y+5dDNTT934N3QuY1sp2LkHzwiaYQGz60hMq0pjAshdeXm5VUOEw==", "dev": true }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, "domexception": { "version": "1.0.1", "resolved": 
"https://registry.npmjs.org/domexception/-/domexception-1.0.1.tgz", @@ -2186,6 +2177,12 @@ "safer-buffer": "^2.1.0" } }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, "end-of-stream": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", @@ -2257,12 +2254,148 @@ } } }, + "eslint": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.5.1.tgz", + "integrity": "sha512-32h99BoLYStT1iq1v2P9uwpyznQ4M2jRiFB6acitKz52Gqn+vPaMDUTB1bYi1WN4Nquj2w+t+bimYUG83DC55A==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.10.0", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.2", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.1", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.0.0", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^6.4.1", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.14", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": 
"sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + } + } + }, + "eslint-formatter-pretty": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/eslint-formatter-pretty/-/eslint-formatter-pretty-2.1.1.tgz", + "integrity": "sha512-gWfagucSWBn82WxzwFloBTLAcwYDgnpAfiV5pQfyAV5YpZikuLflRU8nc3Ts9wnNvLhwk4blzb42/C495Yw7BA==", + "dev": true, + "requires": { + "ansi-escapes": "^3.1.0", + "chalk": "^2.1.0", + "eslint-rule-docs": "^1.1.5", + "log-symbols": "^2.0.0", + "plur": "^3.0.1", + "string-width": "^2.0.0", + "supports-hyperlinks": "^1.0.1" + } + }, + "eslint-rule-docs": { + "version": "1.1.158", + "resolved": "https://registry.npmjs.org/eslint-rule-docs/-/eslint-rule-docs-1.1.158.tgz", + "integrity": "sha512-S4jQGXR245fsTtJXOwP7JLnXV0Nw+ZvWC1Vo9zILi/5CueV8yKi3Dr2eH4U8MXmBY90oMT32DyAVLymK2ioaog==", + "dev": true + }, + "eslint-scope": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", + "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "eslint-visitor-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", + "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", + "dev": true + }, + "espree": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz", + "integrity": 
"sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==", + "dev": true, + "requires": { + "acorn": "^7.1.0", + "acorn-jsx": "^5.1.0", + "eslint-visitor-keys": "^1.1.0" + }, + "dependencies": { + "acorn": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", + "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", + "dev": true + } + } + }, "esprima": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/esprima/-/esprima-3.1.3.tgz", "integrity": "sha1-/cpRzuYTOJXjyI1TXOSdv/YqRjM=", "dev": true }, + "esquery": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.1.tgz", + "integrity": "sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA==", + "dev": true, + "requires": { + "estraverse": "^4.0.0" + } + }, + "esrecurse": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", + "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", + "dev": true, + "requires": { + "estraverse": "^4.1.0" + } + }, "estraverse": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz", @@ -2386,6 +2519,17 @@ } } }, + "external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "requires": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + } + }, "extglob": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", @@ -2486,6 +2630,24 @@ "bser": "^2.0.0" } }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": 
"sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "dev": true, + "requires": { + "flat-cache": "^2.0.1" + } + }, "fill-range": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", @@ -2518,6 +2680,23 @@ "locate-path": "^3.0.0" } }, + "flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "dev": true, + "requires": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + } + }, + "flatted": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz", + "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==", + "dev": true + }, "for-in": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", @@ -2548,17 +2727,6 @@ "map-cache": "^0.2.2" } }, - "fs-extra": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", - "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "jsonfile": "^4.0.0", - "universalify": "^0.1.0" - } - }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -2585,7 +2753,8 @@ "ansi-regex": { "version": "2.1.1", "bundled": true, - "dev": true + "dev": true, + "optional": true }, "aproba": { "version": "1.2.0", @@ -2606,12 +2775,14 @@ "balanced-match": { "version": "1.0.0", 
"bundled": true, - "dev": true + "dev": true, + "optional": true }, "brace-expansion": { "version": "1.1.11", "bundled": true, "dev": true, + "optional": true, "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -2626,17 +2797,20 @@ "code-point-at": { "version": "1.1.0", "bundled": true, - "dev": true + "dev": true, + "optional": true }, "concat-map": { "version": "0.0.1", "bundled": true, - "dev": true + "dev": true, + "optional": true }, "console-control-strings": { "version": "1.1.0", "bundled": true, - "dev": true + "dev": true, + "optional": true }, "core-util-is": { "version": "1.0.2", @@ -2753,7 +2927,8 @@ "inherits": { "version": "2.0.3", "bundled": true, - "dev": true + "dev": true, + "optional": true }, "ini": { "version": "1.3.5", @@ -2765,6 +2940,7 @@ "version": "1.0.0", "bundled": true, "dev": true, + "optional": true, "requires": { "number-is-nan": "^1.0.0" } @@ -2779,6 +2955,7 @@ "version": "3.0.4", "bundled": true, "dev": true, + "optional": true, "requires": { "brace-expansion": "^1.1.7" } @@ -2786,12 +2963,14 @@ "minimist": { "version": "0.0.8", "bundled": true, - "dev": true + "dev": true, + "optional": true }, "minipass": { "version": "2.3.5", "bundled": true, "dev": true, + "optional": true, "requires": { "safe-buffer": "^5.1.2", "yallist": "^3.0.0" @@ -2810,6 +2989,7 @@ "version": "0.5.1", "bundled": true, "dev": true, + "optional": true, "requires": { "minimist": "0.0.8" } @@ -2890,7 +3070,8 @@ "number-is-nan": { "version": "1.0.1", "bundled": true, - "dev": true + "dev": true, + "optional": true }, "object-assign": { "version": "4.1.1", @@ -2902,6 +3083,7 @@ "version": "1.4.0", "bundled": true, "dev": true, + "optional": true, "requires": { "wrappy": "1" } @@ -2987,7 +3169,8 @@ "safe-buffer": { "version": "5.1.2", "bundled": true, - "dev": true + "dev": true, + "optional": true }, "safer-buffer": { "version": "2.1.2", @@ -3023,6 +3206,7 @@ "version": "1.0.2", "bundled": true, "dev": true, + "optional": true, "requires": { 
"code-point-at": "^1.0.0", "is-fullwidth-code-point": "^1.0.0", @@ -3042,6 +3226,7 @@ "version": "3.0.1", "bundled": true, "dev": true, + "optional": true, "requires": { "ansi-regex": "^2.0.0" } @@ -3085,12 +3270,14 @@ "wrappy": { "version": "1.0.2", "bundled": true, - "dev": true + "dev": true, + "optional": true }, "yallist": { "version": "3.0.3", "bundled": true, - "dev": true + "dev": true, + "optional": true } } }, @@ -3100,6 +3287,12 @@ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", "dev": true }, + "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "dev": true + }, "get-caller-file": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", @@ -3149,6 +3342,15 @@ "path-is-absolute": "^1.0.0" } }, + "glob-parent": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", + "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, "globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -3168,9 +3370,9 @@ "dev": true }, "handlebars": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.1.2.tgz", - "integrity": "sha512-nvfrjqvt9xQ8Z/w0ijewdD/vvWDTOweBUm96NTr66Wfvo1mJenBLwcYmPs3TIBP5ruzYGD7Hx/DaM9RmhroGPw==", + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.4.5.tgz", + "integrity": "sha512-0Ce31oWVB7YidkaTq33ZxEbN+UDxMMgThvCe8ptgQViymL5DPis9uLdTA13MiRPhgvqyxIegugrP97iK3JeBHg==", "dev": true, "requires": { "neo-async": "^2.6.0", @@ -3254,17 +3456,6 @@ } } }, - "highlight.js": { - "version": 
"9.15.10", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-9.15.10.tgz", - "integrity": "sha512-RoV7OkQm0T3os3Dd2VHLNMoaoDVx77Wygln3n9l5YV172XonWG6rgQD3XnF/BuFFZw9A0TJgmMSO8FEWQgvcXw==", - "dev": true - }, - "hoek": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.1.tgz", - "integrity": "sha512-QLg82fGkfnJ/4iy1xZ81/9SIJiq1NGFUMGs6ParyjBZr6jW2Ufj/snDqTHixNlHdPNwN2RLVD0Pi3igeK9+JfA==" - }, "hosted-git-info": { "version": "2.7.1", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.7.1.tgz", @@ -3299,6 +3490,30 @@ "safer-buffer": ">= 2.1.2 < 3" } }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true + }, + "import-fresh": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.1.0.tgz", + "integrity": "sha512-PpuksHKGt8rXfWEr9m9EHIpgyyaltBy8+eF6GJM0QCAxMgxCfucMF3mjecK2QsJr0amJW7gTqh5/wht0z2UhEQ==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "dependencies": { + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + } + } + }, "import-local": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz", @@ -3331,11 +3546,34 @@ "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", "dev": true }, - "interpret": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.2.0.tgz", - "integrity": "sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw==", - "dev": true + "inquirer": { + "version": "6.5.2", + "resolved": 
"https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + } + } }, "invariant": { "version": "2.2.4", @@ -3352,6 +3590,12 @@ "integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==", "dev": true }, + "irregular-plurals": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/irregular-plurals/-/irregular-plurals-2.0.0.tgz", + "integrity": "sha512-Y75zBYLkh0lJ9qxeHlMjQ7bSbyiSqNW/UOPWDmzC7cXskL1hekSITh1Oc6JV0XCWWZ9DE8VYSB71xocLk3gmGw==", + "dev": true + }, "is-accessor-descriptor": { "version": "0.1.6", "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", @@ -3450,6 +3694,12 @@ "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", "dev": true }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", @@ -3462,6 +3712,15 @@ "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", "dev": true }, + "is-glob": 
{ + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, "is-number": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", @@ -3491,6 +3750,12 @@ "isobject": "^3.0.1" } }, + "is-promise": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", + "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", + "dev": true + }, "is-regex": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", @@ -4070,6 +4335,14 @@ "natural-compare": "^1.4.0", "pretty-format": "^24.8.0", "semver": "^5.5.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } } }, "jest-util": { @@ -4174,6 +4447,24 @@ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", "dev": true }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "dependencies": { + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + } + } + }, "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", @@ -4253,20 +4544,17 @@ "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, - "jsonfile": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", - "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.6" - } - }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", @@ -4344,9 +4632,10 @@ } }, "lodash": { - "version": "4.17.11", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", - "integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==" + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true }, "lodash.assign": { "version": "4.2.0", @@ -4364,6 +4653,15 @@ "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", "dev": true }, + "log-symbols": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz", + "integrity": "sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==", + "dev": true, + "requires": { + "chalk": "^2.0.1" + } + }, "loose-envify": { "version": "1.4.0", "resolved": 
"https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", @@ -4444,12 +4742,6 @@ "object-visit": "^1.0.0" } }, - "marked": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/marked/-/marked-0.4.0.tgz", - "integrity": "sha512-tMsdNBgOsrUophCAFQl0XPe6Zqk/uy9gnue+jIIKhykO51hxyu6uNx7zBPy0+y/WKYVZZMspV9YeXLNdKk+iYw==", - "dev": true - }, "mem": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz", @@ -4526,9 +4818,9 @@ "dev": true }, "mixin-deep": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.1.tgz", - "integrity": "sha512-8ZItLHeEgaqEvd5lYBXfm4EZSFCX29Jb9K+lAHhDKzReKBQKj3R+7NOF6tjqYi9t4oI8VUfaWITJQm86wnXGNQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", + "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", "dev": true, "requires": { "for-in": "^1.0.2", @@ -4561,6 +4853,12 @@ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, "nan": { "version": "2.13.2", "resolved": "https://registry.npmjs.org/nan/-/nan-2.13.2.tgz", @@ -4632,20 +4930,33 @@ } }, "lodash": { - "version": "4.17.11", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", - "integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==", + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": 
"sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "dev": true } } }, "node-cache": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/node-cache/-/node-cache-3.2.1.tgz", - "integrity": "sha1-p5WNMqikLZEZziWYZWfqLF+WZ3M=", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/node-cache/-/node-cache-4.2.1.tgz", + "integrity": "sha512-BOb67bWg2dTyax5kdef5WfU3X8xu4wPg+zHzkvls0Q/QpYycIFRLEEIdAx9Wma43DxG6Qzn4illdZoYseKWa4A==", "requires": { - "clone": "1.0.x", - "lodash": "4.x" + "clone": "2.x", + "lodash": "^4.17.15" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + } } }, "node-forge": { @@ -4677,6 +4988,14 @@ "semver": "^5.5.0", "shellwords": "^0.1.1", "which": "^1.3.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } } }, "node-releases": { @@ -4686,6 +5005,14 @@ "dev": true, "requires": { "semver": "^5.3.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } } }, "node-sha1": { @@ -4703,6 +5030,14 @@ "resolve": "^1.10.0", "semver": "2 || 3 || 4 || 5", "validate-npm-package-license": "^3.0.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } } }, "normalize-path": { @@ 
-4814,6 +5149,23 @@ "wrappy": "1" } }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + }, + "dependencies": { + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + } + } + }, "optimist": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", @@ -4920,6 +5272,15 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "requires": { + "callsites": "^3.0.0" + } + }, "parse-json": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", @@ -5010,6 +5371,15 @@ "find-up": "^3.0.0" } }, + "plur": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/plur/-/plur-3.1.1.tgz", + "integrity": "sha512-t1Ax8KUvV3FFII8ltczPn2tJdjqbd1sIzu6t4JL7nQ3EyeL/lTrj5PWKb06ic5/6XYDr65rQ4uzQEGN70/6X5w==", + "dev": true, + "requires": { + "irregular-plurals": "^2.0.0" + } + }, "pn": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/pn/-/pn-1.1.0.tgz", @@ -5156,15 +5526,6 @@ "util.promisify": "^1.0.0" } }, - "rechoir": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", - "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", - "dev": true, - "requires": { - "resolve": "^1.1.6" - } - }, "redis": { "version": "2.8.0", "resolved": 
"https://registry.npmjs.org/redis/-/redis-2.8.0.tgz", @@ -5216,6 +5577,12 @@ "integrity": "sha512-LFrA98Dw/heXqDojz7qKFdygZmFoiVlvE1Zp7Cq2cvF+ZA+03Gmhy0k0PQlsC1jvHPiTUSs+pDHEuSWv6+6D7w==", "dev": true }, + "regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "dev": true + }, "remove-trailing-separator": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", @@ -5355,6 +5722,16 @@ "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=", "dev": true }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, "ret": { "version": "0.1.15", "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", @@ -5376,6 +5753,24 @@ "integrity": "sha512-6FomvYPfs+Jy9TfXmBpBuMWNH94SgCsZmJKcanySzgNNP6LjWxBvyLTa9KaMfDDM5oxRfrKDB0r/qeRsLwnBfA==", "dev": true }, + "run-async": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", + "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", + "dev": true, + "requires": { + "is-promise": "^2.1.0" + } + }, + "rxjs": { + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.3.tgz", + "integrity": "sha512-wuYsAYYFdWTAnAaPoKGNhfpWwKZbJW+HgAJ+mImp+Epl7BG8oNWBCTyRM8gba9k4lk8BgWdoYm21Mo/RYhhbgA==", + "dev": true, + "requires": { + "tslib": "^1.9.0" + } + }, "safe-buffer": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", @@ -5436,9 +5831,9 @@ } }, "semver": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz", - "integrity": 
"sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" }, "set-blocking": { "version": "2.0.0", @@ -5447,9 +5842,9 @@ "dev": true }, "set-value": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.0.tgz", - "integrity": "sha512-hw0yxk9GT/Hr5yJEYnHNKYXkIA8mVJgd9ditYZCe16ZczcaELYYcfvaXesNACk2O8O0nTiPQcQhGUQj8JLzeeg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", + "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", "dev": true, "requires": { "extend-shallow": "^2.0.1", @@ -5484,17 +5879,6 @@ "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", "dev": true }, - "shelljs": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.3.tgz", - "integrity": "sha512-fc0BKlAWiLpwZljmOvAOTE/gXawtCoNrP5oaY7KIaQbbyHeQVg01pSEuEGvGh3HEdBU4baCD7wQBwADmM/7f7A==", - "dev": true, - "requires": { - "glob": "^7.0.0", - "interpret": "^1.0.0", - "rechoir": "^0.6.2" - } - }, "shellwords": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/shellwords/-/shellwords-0.1.1.tgz", @@ -5513,6 +5897,23 @@ "integrity": "sha512-N+z4pHB4AmUv0SjveWRd6q1Nj5w62m5jodv+GD8lvmbY/83T/rpbJGZOnK5T149OldDj4Db07BSv9xY4K6NTPQ==", "dev": true }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "dev": true + }, + "slice-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": 
"sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", + "is-fullwidth-code-point": "^2.0.0" + } + }, "snapdragon": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", @@ -5713,6 +6114,12 @@ "extend-shallow": "^3.0.0" } }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, "sshpk": { "version": "1.16.1", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", @@ -5851,6 +6258,12 @@ "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", "dev": true }, + "strip-json-comments": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", + "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", + "dev": true + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -5860,12 +6273,73 @@ "has-flag": "^3.0.0" } }, + "supports-hyperlinks": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-1.0.1.tgz", + "integrity": "sha512-HHi5kVSefKaJkGYXbDuKbUGRVxqnWGn3J2e39CYcNJEfWciGq2zYtOhXLTlvrOZW1QU7VX67w7fMmWafHX9Pfw==", + "dev": true, + "requires": { + "has-flag": "^2.0.0", + "supports-color": "^5.0.0" + }, + "dependencies": { + "has-flag": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", + "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=", + "dev": true + } + } + }, "symbol-tree": { "version": "3.2.2", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.2.tgz", "integrity": "sha1-rifbOPZgp64uHDt9G8KQgZuFGeY=", "dev": true }, + "table": { + "version": 
"5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "dev": true, + "requires": { + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" + }, + "dependencies": { + "ajv": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz", + "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==", + "dev": true, + "requires": { + "fast-deep-equal": "^2.0.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, "test-exclude": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", @@ -5878,12 +6352,24 @@ "require-main-filename": "^2.0.0" } }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, "throat": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/throat/-/throat-4.1.0.tgz", "integrity": "sha1-iQN8vJLFarGJJua6TLsgDhVnKmo=", "dev": true }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + 
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "dev": true + }, "tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", @@ -5979,6 +6465,12 @@ "integrity": "sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=", "dev": true }, + "tslib": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", + "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", + "dev": true + }, "tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", @@ -6012,45 +6504,6 @@ "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", "dev": true }, - "typedoc": { - "version": "0.14.2", - "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.14.2.tgz", - "integrity": "sha512-aEbgJXV8/KqaVhcedT7xG6d2r+mOvB5ep3eIz1KuB5sc4fDYXcepEEMdU7XSqLFO5hVPu0nllHi1QxX2h/QlpQ==", - "dev": true, - "requires": { - "@types/fs-extra": "^5.0.3", - "@types/handlebars": "^4.0.38", - "@types/highlight.js": "^9.12.3", - "@types/lodash": "^4.14.110", - "@types/marked": "^0.4.0", - "@types/minimatch": "3.0.3", - "@types/shelljs": "^0.8.0", - "fs-extra": "^7.0.0", - "handlebars": "^4.0.6", - "highlight.js": "^9.13.1", - "lodash": "^4.17.10", - "marked": "^0.4.0", - "minimatch": "^3.0.0", - "progress": "^2.0.0", - "shelljs": "^0.8.2", - "typedoc-default-themes": "^0.5.0", - "typescript": "3.2.x" - }, - "dependencies": { - "typescript": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.2.4.tgz", - "integrity": "sha512-0RNDbSdEokBeEAkgNbxJ+BLwSManFy9TeXz8uW+48j/xhEXv1ePME60olyzw2XzUqUBNAYFeJadIqAgNqIACwg==", - "dev": true - } - } - }, - "typedoc-default-themes": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/typedoc-default-themes/-/typedoc-default-themes-0.5.0.tgz", - "integrity": "sha1-bcJDPnjti+qOiHo6zeLzF4W9Yic=", - "dev": true - }, "typescript": 
{ "version": "3.0.1", "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.0.1.tgz", @@ -6058,13 +6511,13 @@ "dev": true }, "uglify-js": { - "version": "3.4.9", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.4.9.tgz", - "integrity": "sha512-8CJsbKOtEbnJsTyv6LE6m6ZKniqMiFWmm9sRbopbkGs3gMPPfd3Fh8iIA4Ykv5MgaTbqHr4BaoGLJLZNhsrW1Q==", + "version": "3.6.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.3.tgz", + "integrity": "sha512-KfQUgOqTkLp2aZxrMbCuKCDGW9slFYu2A23A36Gs7sGzTLcRBDORdOi5E21KWHFIfkY8kzgi/Pr1cXCh0yIp5g==", "dev": true, "optional": true, "requires": { - "commander": "~2.17.1", + "commander": "~2.20.3", "source-map": "~0.6.1" }, "dependencies": { @@ -6106,46 +6559,17 @@ "dev": true }, "union-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.0.tgz", - "integrity": "sha1-XHHDTLW61dzr4+oM0IIHulqhrqQ=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", + "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", "dev": true, "requires": { "arr-union": "^3.1.0", "get-value": "^2.0.6", "is-extendable": "^0.1.1", - "set-value": "^0.4.3" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - }, - "set-value": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-0.4.3.tgz", - "integrity": "sha1-fbCPnT0i3H945Trzw79GZuzfzPE=", - "dev": true, - "requires": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.1", - "to-object-path": "^0.3.0" - } - } + "set-value": "^2.0.1" } }, - "universalify": { - "version": "0.1.2", - "resolved": 
"https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", - "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", - "dev": true - }, "unset-value": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", @@ -6234,6 +6658,12 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" }, + "v8-compile-cache": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", + "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", + "dev": true + }, "validate-npm-package-license": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", @@ -6398,6 +6828,15 @@ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", "dev": true }, + "write": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", + "dev": true, + "requires": { + "mkdirp": "^0.5.1" + } + }, "write-file-atomic": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.1.tgz", diff --git a/package.json b/package.json index 67bb624..eed9191 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,8 @@ "main": "index.js", "scripts": { "test": "jest --ci --forceExit", - "check-typescript": "node_modules/typescript/bin/tsc" + "check-typescript": "node_modules/typescript/bin/tsc", + "lint": "eslint --format 'node_modules/eslint-formatter-pretty' --ignore-path .eslintignore ." 
}, "types": "./index.d.ts", "repository": { @@ -24,15 +25,14 @@ "homepage": "https://github.com/launchdarkly/node-server-sdk", "dependencies": { "@types/redis": "2.8.6", - "async": "2.6.0", - "hoek": "4.2.1", + "async": "3.1.0", "lrucache": "^1.0.3", - "node-cache": "^3.2.1", + "node-cache": "^4.2.0", "node-sha1": "0.0.1", "redis": "^2.6.0-2", "request": "2.88.0", "request-etag": "^2.0.3", - "semver": "5.5.0", + "semver": "^6.3.0", "tunnel": "0.0.6", "winston": "2.4.1", "yaml": "1.0.1" @@ -44,12 +44,13 @@ "@babel/core": "7.4.3", "@babel/preset-env": "7.4.3", "babel-jest": "24.7.1", + "eslint": "^6.5.1", + "eslint-formatter-pretty": "2.1.1", "jest": "24.7.1", "jest-junit": "6.3.0", "nock": "9.2.3", "selfsigned": "1.10.4", "tmp": "0.0.33", - "typedoc": "0.14.2", "typescript": "3.0.1" }, "jest": { diff --git a/polling.js b/polling.js index 009d374..5630ef2 100644 --- a/polling.js +++ b/polling.js @@ -1,14 +1,14 @@ -var errors = require('./errors'); -var messages = require('./messages'); -var dataKind = require('./versioned_data_kind'); +const errors = require('./errors'); +const messages = require('./messages'); +const dataKind = require('./versioned_data_kind'); function PollingProcessor(config, requestor) { - var processor = {}, - featureStore = config.featureStore, - stopped = false; + const processor = {}, + featureStore = config.featureStore; + let stopped = false; function poll(cb) { - var startTime, delta; + let startTime; cb = cb || function(){}; @@ -17,45 +17,46 @@ function PollingProcessor(config, requestor) { } startTime = new Date().getTime(); - config.logger.debug("Polling LaunchDarkly for feature flag updates"); - requestor.requestAllData(function(err, resp) { + config.logger.debug('Polling LaunchDarkly for feature flag updates'); + requestor.requestAllData((err, resp) => { const elapsed = new Date().getTime() - startTime; const sleepFor = Math.max(config.pollInterval * 1000 - elapsed, 0); - config.logger.debug("Elapsed: %d ms, sleeping for %d ms", 
elapsed, sleepFor); + config.logger.debug('Elapsed: %d ms, sleeping for %d ms', elapsed, sleepFor); if (err) { - const message = err.status || err.message; - cb(new errors.LDPollingError(messages.httpErrorMessage(message, 'polling request', 'will retry'))); - if (!errors.isHttpErrorRecoverable(err.status)) { - config.logger.error('Received 401 error, no further polling requests will be made since SDK key is invalid'); + if (err.status && !errors.isHttpErrorRecoverable(err.status)) { + const message = messages.httpErrorMessage(err.status, 'polling request'); + config.logger.error(message); + cb(new errors.LDPollingError(message)); } else { + config.logger.warn(messages.httpErrorMessage(err.status || err.message, 'polling request', 'will retry')); // Recursively call poll after the appropriate delay - setTimeout(function() { poll(cb); }, sleepFor); + setTimeout(() => { poll(cb); }, sleepFor); } } else { - var allData = JSON.parse(resp); - var initData = {}; + const allData = JSON.parse(resp); + const initData = {}; initData[dataKind.features.namespace] = allData.flags; initData[dataKind.segments.namespace] = allData.segments; - featureStore.init(initData, function() { + featureStore.init(initData, () => { cb(); // Recursively call poll after the appropriate delay - setTimeout(function() { poll(cb); }, sleepFor); + setTimeout(() => { poll(cb); }, sleepFor); }); } }); - }; + } - processor.start = function(cb) { + processor.start = cb => { poll(cb); - } + }; - processor.stop = function() { + processor.stop = () => { stopped = true; - } + }; - processor.close = function() { - this.stop(); - } + processor.close = () => { + processor.stop(); + }; return processor; } diff --git a/redis_feature_store.js b/redis_feature_store.js index da1aecb..07cdea5 100644 --- a/redis_feature_store.js +++ b/redis_feature_store.js @@ -1,10 +1,9 @@ -var redis = require('redis'), - winston = require('winston'), - dataKind = require('./versioned_data_kind'), - CachingStoreWrapper = 
require('./caching_store_wrapper'); +const redis = require('redis'), + winston = require('winston'), + dataKind = require('./versioned_data_kind'), + CachingStoreWrapper = require('./caching_store_wrapper'); - -var noop = function(){}; +const noop = function(){}; function RedisFeatureStore(redisOpts, cacheTTL, prefix, logger) { @@ -13,10 +12,10 @@ function RedisFeatureStore(redisOpts, cacheTTL, prefix, logger) { function redisFeatureStoreInternal(redisOpts, prefix, logger) { - var client = redis.createClient(redisOpts), - store = {}, - itemsPrefix = (prefix || "launchdarkly") + ":", - initedKey = itemsPrefix + "$inited"; + const client = redis.createClient(redisOpts), + store = {}, + itemsPrefix = (prefix || 'launchdarkly') + ':', + initedKey = itemsPrefix + '$inited'; logger = (logger || new winston.Logger({ @@ -27,25 +26,25 @@ function redisFeatureStoreInternal(redisOpts, prefix, logger) { }) ); - var connected = false; - var initialConnect = true; - client.on('error', function(err) { + let connected = false; + let initialConnect = true; + client.on('error', err => { // Note that we *must* have an error listener or else any connection error will trigger an // uncaught exception. 
logger.error('Redis error - ' + err); }); - client.on('reconnecting', function(info) { + client.on('reconnecting', info => { logger.info('Attempting to reconnect to Redis (attempt #' + info.attempt + ', delay: ' + info.delay + 'ms)'); }); - client.on('connect', function() { + client.on('connect', () => { if (!initialConnect) { logger.warn('Reconnected to Redis'); } initialConnect = false; connected = true; }); - client.on('end', function() { + client.on('end', () => { connected = false; }); @@ -58,7 +57,6 @@ function redisFeatureStoreInternal(redisOpts, prefix, logger) { // A helper that performs a get with the redis client function doGet(kind, key, cb) { - var item; cb = cb || noop; if (!connected) { @@ -67,20 +65,20 @@ function redisFeatureStoreInternal(redisOpts, prefix, logger) { return; } - client.hget(itemsKey(kind), key, function(err, obj) { + client.hget(itemsKey(kind), key, (err, obj) => { if (err) { - logger.error("Error fetching key " + key + " from Redis in '" + kind.namespace + "'", err); + logger.error('Error fetching key ' + key + ' from Redis in \'' + kind.namespace + '\'', err); cb(null); } else { - item = JSON.parse(obj); + const item = JSON.parse(obj); cb(item); } }); } - store.getInternal = function(kind, key, cb) { + store.getInternal = (kind, key, cb) => { cb = cb || noop; - doGet(kind, key, function(item) { + doGet(kind, key, item => { if (item && !item.deleted) { cb(item); } else { @@ -89,7 +87,7 @@ function redisFeatureStoreInternal(redisOpts, prefix, logger) { }); }; - store.getAllInternal = function(kind, cb) { + store.getAllInternal = (kind, cb) => { cb = cb || noop; if (!connected) { logger.warn('Attempted to fetch all keys while Redis connection is down'); @@ -97,15 +95,15 @@ function redisFeatureStoreInternal(redisOpts, prefix, logger) { return; } - client.hgetall(itemsKey(kind), function(err, obj) { + client.hgetall(itemsKey(kind), (err, obj) => { if (err) { - logger.error("Error fetching '" + kind.namespace + "'' from Redis", err); 
+ logger.error('Error fetching \'' + kind.namespace + '\' from Redis', err); cb(null); } else { - var results = {}, - items = obj; + const results = {}, + items = obj; - for (var key in items) { + for (let key in items) { if (Object.hasOwnProperty.call(items, key)) { results[key] = JSON.parse(items[key]); } @@ -115,17 +113,17 @@ function redisFeatureStoreInternal(redisOpts, prefix, logger) { }); }; - store.initInternal = function(allData, cb) { - var multi = client.multi(); + store.initInternal = (allData, cb) => { + const multi = client.multi(); - for (var kindNamespace in allData) { + for (let kindNamespace in allData) { if (Object.hasOwnProperty.call(allData, kindNamespace)) { - var kind = dataKind[kindNamespace]; - var baseKey = itemsKey(kind); - var items = allData[kindNamespace]; - var stringified = {}; + const kind = dataKind[kindNamespace]; + const baseKey = itemsKey(kind); + const items = allData[kindNamespace]; + const stringified = {}; multi.del(baseKey); - for (var key in items) { + for (let key in items) { if (Object.hasOwnProperty.call(items, key)) { stringified[key] = JSON.stringify(items[key]); } @@ -137,41 +135,41 @@ function redisFeatureStoreInternal(redisOpts, prefix, logger) { } } - multi.set(initedKey, ""); + multi.set(initedKey, ''); - multi.exec(function(err, replies) { + multi.exec(err => { if (err) { - logger.error("Error initializing Redis store", err); + logger.error('Error initializing Redis store', err); } cb(); }); }; - store.upsertInternal = function(kind, item, cb) { - updateItemWithVersioning(kind, item, function(err, attemptedWrite) { + store.upsertInternal = (kind, item, cb) => { + updateItemWithVersioning(kind, item, (err, attemptedWrite) => { if (err) { - logger.error("Error upserting key " + key + " in '" + kind.namespace + "'", err); + logger.error('Error upserting key ' + item.key + ' in \'' + kind.namespace + '\'', err); } cb(err, attemptedWrite); }); - } + }; function updateItemWithVersioning(kind, newItem, cb) { 
client.watch(itemsKey(kind)); - var multi = client.multi(); + const multi = client.multi(); // testUpdateHook is instrumentation, used only by the unit tests - var prepare = store.testUpdateHook || function(prepareCb) { prepareCb(); }; - prepare(function() { - doGet(kind, newItem.key, function(oldItem) { + const prepare = store.testUpdateHook || function(prepareCb) { prepareCb(); }; + prepare(() => { + doGet(kind, newItem.key, oldItem => { if (oldItem && oldItem.version >= newItem.version) { multi.discard(); cb(null, oldItem); } else { multi.hset(itemsKey(kind), newItem.key, JSON.stringify(newItem)); - multi.exec(function(err, replies) { + multi.exec((err, replies) => { if (!err && replies === null) { // This means the EXEC failed because someone modified the watched key - logger.debug("Concurrent modification detected, retrying"); + logger.debug('Concurrent modification detected, retrying'); updateItemWithVersioning(kind, newItem, cb); } else { cb(err, newItem); @@ -182,14 +180,14 @@ function redisFeatureStoreInternal(redisOpts, prefix, logger) { }); } - store.initializedInternal = function(cb) { + store.initializedInternal = cb => { cb = cb || noop; client.exists(initedKey, function(err, obj) { cb(Boolean(!err && obj)); }); }; - store.close = function() { + store.close = () => { client.quit(); }; diff --git a/requestor.js b/requestor.js index f2ae94b..ec569c3 100644 --- a/requestor.js +++ b/requestor.js @@ -1,4 +1,5 @@ -var ETagRequest = require('request-etag'); +const ETagRequest = require('request-etag'); + /** * Creates a new Requestor object, which handles remote requests to fetch feature flags or segments for LaunchDarkly. * This is never called synchronously when requesting a feature flag for a user (e.g. via the toggle) call. 
@@ -11,20 +12,20 @@ var ETagRequest = require('request-etag'); * @param {Object} the LaunchDarkly client configuration object **/ function Requestor(sdkKey, config) { - var requestor = {}; + const requestor = {}; - var cacheConfig = { + const cacheConfig = { max: 100, // LRUCache passes each cached item through the "length" function to determine how many units it should // count for toward "max". We want our cache limit to be based on the number of responses, not their // size; that is in fact the default behavior of LRUCache, but request-etag overrides it unless we do this: length: function() { return 1; } }; - var requestWithETagCaching = new ETagRequest(cacheConfig); + const requestWithETagCaching = new ETagRequest(cacheConfig); function makeRequest(resource) { - var requestParams = Object.assign({}, config.tlsParams, { - method: "GET", + const requestParams = Object.assign({}, config.tlsParams, { + method: 'GET', url: config.baseUri + resource, headers: { 'Authorization': sdkKey, @@ -34,8 +35,8 @@ function Requestor(sdkKey, config) { agent: config.proxyAgent }); - return function(cb, errCb) { - requestWithETagCaching(requestParams, function(err, resp, body) { + return (cb, errCb) => { + requestWithETagCaching(requestParams, (err, resp, body) => { // Note that when request-etag gives us a cached response, the body will only be in the "body" // callback parameter -- not in resp.getBody(). For a fresh response, it'll be in both. 
if (err) { @@ -48,9 +49,9 @@ function Requestor(sdkKey, config) { } function processResponse(cb) { - return function(response, body) { + return (response, body) => { if (response.statusCode !== 200 && response.statusCode != 304) { - var err = new Error('Unexpected status code: ' + response.statusCode); + const err = new Error('Unexpected status code: ' + response.statusCode); err.status = response.statusCode; cb(err, null); } else { @@ -60,26 +61,26 @@ function Requestor(sdkKey, config) { } function processErrorResponse(cb) { - return function(err) { + return err => { cb(err, null); - } + }; } - requestor.requestObject = function(kind, key, cb) { - var req = makeRequest(kind.requestPath + key); + requestor.requestObject = (kind, key, cb) => { + const req = makeRequest(kind.requestPath + key); req( processResponse(cb), processErrorResponse(cb) ); - } + }; - requestor.requestAllData = function(cb) { - var req = makeRequest('/sdk/latest-all'); + requestor.requestAllData = cb => { + const req = makeRequest('/sdk/latest-all'); req( processResponse(cb), processErrorResponse(cb) ); - } + }; return requestor; } diff --git a/scripts/better-audit.sh b/scripts/better-audit.sh new file mode 100755 index 0000000..b835456 --- /dev/null +++ b/scripts/better-audit.sh @@ -0,0 +1,76 @@ +#!/bin/bash + +# This script processes the output of "npm audit" to make it more useful, as follows: +# - For each flagged vulnerability, it looks at the "path" field and extracts both the flagged +# package (the last element in the path) and the topmost dependency that led to it (the first +# element in the path). +# - It sorts these and eliminates duplicates. +# - It then compares each of the topmost dependencies to package.json to see if it is from +# "dependencies", "peerDependencies", or "devDependencies". If it is either of the first two +# then this is a real runtime vulnerability, and must be fixed by updating the topmost +# dependency. 
If it is from devDependencies, then it can be safely fixed with "npm audit fix". + +set -e + +function readPackages() { + inCategory=$1 + jq -r ".${inCategory} | keys | .[]" package.json 2>/dev/null || true +} + +function isInList() { + item=$1 + shift + for x in $@; do + if [ "$item" == "$x" ]; then + true + return + fi + done + false +} + +dependencies=$(readPackages dependencies) +devDependencies=$(readPackages devDependencies) +peerDependencies=$(readPackages peerDependencies) + +function processItems() { + flaggedRuntime=0 + flaggedDev=0 + while read -r badPackage topLevelDep; do + echo -n "flagged package \"$badPackage\", referenced via \"$topLevelDep\" " + for category in dependencies peerDependencies devDependencies; do + if isInList $topLevelDep ${!category}; then + if [ "$category" == "devDependencies" ]; then + echo "-- from \"$category\"" + flaggedDev=1 + else + echo "-- from \"$category\" (RUNTIME) ***" + flaggedRuntime=1 + fi + break + fi + done + done + echo + if [ "$flaggedRuntime" == "1" ]; then + echo "*** At least one runtime dependency was flagged. These must be fixed by updating package.json." + echo "Do not use 'npm audit fix'." + exit 1 # return an error, causing the build to fail + elif [ "$flaggedDev" == "1" ]; then + echo "Only development dependencies were flagged. You may safely run 'npm audit fix', which will" + echo "fix these by adding overrides to package-lock.json." + else + echo "Congratulations! No dependencies were flagged by 'npm audit'." + fi +} + +echo "Running npm audit..." 
+echo + +npm audit --json \ + | grep '"path":' \ + | sort | uniq \ + | sed -n -e 's#.*"path": "\([^"]*\)".*#\1#p' \ + | awk -F '>' '{ print $NF,$1 }' \ + | sort | uniq \ + | processItems diff --git a/streaming.js b/streaming.js index 7301ff0..b184bf2 100644 --- a/streaming.js +++ b/streaming.js @@ -1,12 +1,12 @@ -var errors = require('./errors'); - -var EventSource = require('./eventsource'); -var dataKind = require('./versioned_data_kind'); +const errors = require('./errors'); +const messages = require('./messages'); +const EventSource = require('./eventsource'); +const dataKind = require('./versioned_data_kind'); function StreamProcessor(sdkKey, config, requestor, eventSourceFactory) { - var processor = {}, - featureStore = config.featureStore, - es; + const processor = {}, + featureStore = config.featureStore; + let es; eventSourceFactory = eventSourceFactory || EventSource; @@ -14,17 +14,25 @@ function StreamProcessor(sdkKey, config, requestor, eventSourceFactory) { return path.startsWith(kind.streamApiPath) ? 
path.substring(kind.streamApiPath.length) : null; } - processor.start = function(fn) { - var cb = fn || function(){}; - es = new eventSourceFactory(config.streamUri + "/all", + processor.start = fn => { + const cb = fn || function(){}; + es = new eventSourceFactory(config.streamUri + '/all', { agent: config.proxyAgent, headers: {'Authorization': sdkKey,'User-Agent': config.userAgent}, tlsParams: config.tlsParams }); - es.onerror = function(err) { - cb(new errors.LDStreamingError(err.message, err.code)); + es.onerror = err => { + if (err.status && !errors.isHttpErrorRecoverable(err.status)) { + const message = messages.httpErrorMessage(err.status, 'streaming request'); + config.logger.error(message); + es && es.close(); + cb(new errors.LDStreamingError(err.message, err.status)); + } else { + const message = messages.httpErrorMessage(err.status, 'streaming request', 'will retry'); + config.logger.warn(message); + } }; function reportJsonError(type, data) { @@ -33,20 +41,20 @@ function StreamProcessor(sdkKey, config, requestor, eventSourceFactory) { cb(new errors.LDStreamingError('Malformed JSON data in event stream')); } - es.addEventListener('put', function(e) { + es.addEventListener('put', e => { config.logger.debug('Received put event'); if (e && e.data) { - var all; + let all; try { all = JSON.parse(e.data); } catch (err) { reportJsonError('put', e.data); return; } - var initData = {}; + const initData = {}; initData[dataKind.features.namespace] = all.data.flags; initData[dataKind.segments.namespace] = all.data.segments; - featureStore.init(initData, function() { + featureStore.init(initData, () => { cb(); }); } else { @@ -54,19 +62,19 @@ function StreamProcessor(sdkKey, config, requestor, eventSourceFactory) { } }); - es.addEventListener('patch', function(e) { + es.addEventListener('patch', e => { config.logger.debug('Received patch event'); if (e && e.data) { - var patch; + let patch; try { patch = JSON.parse(e.data); } catch (err) { reportJsonError('patch', 
e.data); return; } - for (var k in dataKind) { - var kind = dataKind[k]; - var key = getKeyFromPath(kind, patch.path); + for (let k in dataKind) { + const kind = dataKind[k]; + const key = getKeyFromPath(kind, patch.path); if (key != null) { config.logger.debug('Updating ' + key + ' in ' + kind.namespace); featureStore.upsert(kind, patch.data); @@ -78,10 +86,10 @@ function StreamProcessor(sdkKey, config, requestor, eventSourceFactory) { } }); - es.addEventListener('delete', function(e) { + es.addEventListener('delete', e => { config.logger.debug('Received delete event'); if (e && e.data) { - var data, version; + let data, version; try { data = JSON.parse(e.data); } catch (err) { @@ -89,9 +97,9 @@ function StreamProcessor(sdkKey, config, requestor, eventSourceFactory) { return; } version = data.version; - for (var k in dataKind) { - var kind = dataKind[k]; - var key = getKeyFromPath(kind, data.path); + for (let k in dataKind) { + const kind = dataKind[k]; + const key = getKeyFromPath(kind, data.path); if (key != null) { config.logger.debug('Deleting ' + key + ' in ' + kind.namespace); featureStore.delete(kind, key, version); @@ -103,32 +111,32 @@ function StreamProcessor(sdkKey, config, requestor, eventSourceFactory) { } }); - es.addEventListener('indirect/put', function(e) { - config.logger.debug('Received indirect put event') - requestor.requestAllData(function (err, resp) { + es.addEventListener('indirect/put', () => { + config.logger.debug('Received indirect put event'); + requestor.requestAllData((err, resp) => { if (err) { cb(err); } else { - var all = JSON.parse(resp); - var initData = {}; + const all = JSON.parse(resp); + const initData = {}; initData[dataKind.features.namespace] = all.flags; initData[dataKind.segments.namespace] = all.segments; - featureStore.init(initData, function() { + featureStore.init(initData, () => { cb(); }); } - }) + }); }); - es.addEventListener('indirect/patch', function(e) { - config.logger.debug('Received indirect patch event') 
+ es.addEventListener('indirect/patch', e => { + config.logger.debug('Received indirect patch event'); if (e && e.data) { - var path = e.data; - for (var k in dataKind) { - var kind = dataKind[k]; - var key = getKeyFromPath(kind, path); + const path = e.data; + for (let k in dataKind) { + const kind = dataKind[k]; + const key = getKeyFromPath(kind, path); if (key != null) { - requestor.requestObject(kind, key, function(err, resp) { + requestor.requestObject(kind, key, (err, resp) => { if (err) { cb(new errors.LDStreamingError('Unexpected error requesting ' + key + ' in ' + kind.namespace)); } else { @@ -143,18 +151,17 @@ function StreamProcessor(sdkKey, config, requestor, eventSourceFactory) { cb(new errors.LDStreamingError('Unexpected payload from event stream')); } }); - } + }; - processor.stop = function() { + processor.stop = () => { if (es) { es.close(); } - } - - processor.close = function() { - this.stop(); - } + }; + processor.close = () => { + processor.stop(); + }; return processor; } diff --git a/test.js b/test.js new file mode 100644 index 0000000..d5764f7 --- /dev/null +++ b/test.js @@ -0,0 +1,32 @@ +var ld = require('./index.js'); +var winston = require('winston'); + +var logger = new winston.Logger({ +level: 'debug', +transports: [ + new (winston.transports.Console)(({ + formatter: function(options) { + return '[LaunchDarkly] ' + (options.message ? 
options.message : ''); + } + })), +] +}); + +var fileSource = ld.FileDataSource({ paths: [ 'test.yml' ], autoUpdate: true, logger: logger }); + +var config = { + baseUri: 'https://ld-stg.launchdarkly.com', + streamUri: 'https://stream-stg.launchdarkly.com', + eventsUri: 'https://events-stg.launchdarkly.com', + sendEvents: false, + //updateProcessor: fileSource, + logger: logger +}; + +var client = ld.init('sdk-00000000-0000-0000-0000-000000000000', config); + +client.on('ready', function() { + client.variation('catflag', {key: 'user'}, 'bear', function(err, value) { + console.log('catflag: ' + value); + }); +}); diff --git a/test/LDClient-tls-test.js b/test/LDClient-tls-test.js index ec03248..5b24dc6 100644 --- a/test/LDClient-tls-test.js +++ b/test/LDClient-tls-test.js @@ -1,6 +1,7 @@ import * as selfsigned from 'selfsigned'; import * as LDClient from '../index'; +import { sleepAsync } from './async_utils'; import * as httpServer from './http_server'; import * as stubs from './stubs'; @@ -57,7 +58,9 @@ describe('LDClient TLS configuration', () => { logger: stubs.stubLogger(), }; const client = LDClient.init(sdkKey, config); - await expect(client.waitForInitialization()).rejects.toThrow(/self signed/); + await sleepAsync(300); // the client won't signal an unrecoverable error, but it should log a message + expect(config.logger.warn.mock.calls.length).toEqual(2); + expect(config.logger.warn.mock.calls[1][0]).toMatch(/self signed/); }); it('can use custom TLS options for streaming as well as polling', async () => { diff --git a/test/async_utils.js b/test/async_utils.js index c3795f0..f748172 100644 --- a/test/async_utils.js +++ b/test/async_utils.js @@ -17,8 +17,37 @@ function sleepAsync(millis) { }); } +function AsyncQueue() { + const items = []; + const awaiters = []; + + return { + add: item => { + if (awaiters.length) { + awaiters.shift()(item); + } else { + items.push(item); + } + }, + + take: () => { + if (items.length) { + return 
Promise.resolve(items.shift()); + } + return new Promise(resolve => { + awaiters.push(resolve); + }); + }, + + isEmpty: () => { + return items.length === 0; + } + }; +} + module.exports = { asyncify: asyncify, asyncifyNode: asyncifyNode, - sleepAsync: sleepAsync + sleepAsync: sleepAsync, + AsyncQueue: AsyncQueue }; diff --git a/test/configuration-test.js b/test/configuration-test.js index 8d839fb..e9cfec3 100644 --- a/test/configuration-test.js +++ b/test/configuration-test.js @@ -2,7 +2,7 @@ var configuration = require('../configuration'); describe('configuration', function() { function checkDefault(name, value) { - it('applies defaults correctly for "' + name + "'", function() { + it('applies defaults correctly for "' + name + '"', function() { var configWithUnspecifiedValue = {}; expect(configuration.validate(configWithUnspecifiedValue)[name]).toEqual(value); var configWithNullValue = {}; diff --git a/test/evaluate_flag-test.js b/test/evaluate_flag-test.js index df09e09..796bba9 100644 --- a/test/evaluate_flag-test.js +++ b/test/evaluate_flag-test.js @@ -12,16 +12,14 @@ function defineFeatures(features, cb) { for (var i in features) { data[dataKind.features.namespace][features[i].key] = features[i]; } - featureStore.init(data); - setTimeout(cb, 0); + featureStore.init(data, cb) } function defineSegment(segment, cb) { var data = {}; data[dataKind.segments.namespace] = {}; data[dataKind.segments.namespace][segment.key] = segment; - featureStore.init(data); - setTimeout(cb, 0); + featureStore.init(data, cb); } function makeFlagWithRules(rules, fallthrough) { @@ -46,7 +44,7 @@ function makeBooleanFlagWithRules(rules) { prerequisites: [], rules: rules, targets: [], - salt: "", + salt: '', fallthrough: { variation: 0 }, offVariation: 0, variations: [ false, true ], @@ -62,9 +60,9 @@ function makeFlagWithSegmentMatch(segment) { return makeBooleanFlagWithOneClause({ attribute: '', op: 'segmentMatch', values: [ segment.key ]}); } -describe('evaluate', function() { 
+describe('evaluate', () => { - it('returns off variation if flag is off', function(done) { + it('returns off variation if flag is off', done => { var flag = { key: 'feature', on: false, @@ -73,14 +71,14 @@ describe('evaluate', function() { variations: ['a', 'b', 'c'] }; var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(detail).toMatchObject({ value: 'b', variationIndex: 1, reason: { kind: 'OFF' } }); expect(events).toMatchObject([]); done(); }); }); - it('returns null if flag is off and off variation is unspecified', function(done) { + it('returns null if flag is off and off variation is unspecified', done => { var flag = { key: 'feature', on: false, @@ -88,14 +86,14 @@ describe('evaluate', function() { variations: ['a', 'b', 'c'] }; var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'OFF' } }); expect(events).toMatchObject([]); done(); }); }); - it('returns error if off variation is too high', function(done) { + it('returns error if off variation is too high', done => { var flag = { key: 'feature', on: false, @@ -104,7 +102,7 @@ describe('evaluate', function() { variations: ['a', 'b', 'c'] }; var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Invalid variation index in flag')); expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' } }); expect(events).toMatchObject([]); @@ -112,7 +110,7 @@ describe('evaluate', function() { }); }); - 
it('returns error if off variation is negative', function(done) { + it('returns error if off variation is negative', done => { var flag = { key: 'feature', on: false, @@ -121,7 +119,7 @@ describe('evaluate', function() { variations: ['a', 'b', 'c'] }; var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Invalid variation index in flag')); expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' } }); expect(events).toMatchObject([]); @@ -129,22 +127,22 @@ describe('evaluate', function() { }); }); - it('returns fallthrough variation if flag is on and no rules match', function(done) { + it('returns fallthrough variation if flag is on and no rules match', done => { var rule = { id: 'id', clauses: [{ attribute: 'key', op: 'in', values: ['other'] }], variation: 2 }; var flag = makeFlagWithRules([rule], { variation: 0 }); var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(detail).toMatchObject({ value: 'a', variationIndex: 0, reason: { kind: 'FALLTHROUGH' } }); expect(events).toMatchObject([]); done(); }); }); - it('returns error if fallthrough variation is too high', function(done) { + it('returns error if fallthrough variation is too high', done => { var rule = { id: 'id', clauses: [{ attribute: 'key', op: 'in', values: ['other'] }], variation: 99 }; var flag = makeFlagWithRules([rule], { variation: 99 }); var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Invalid variation index in flag')); 
expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }}); expect(events).toMatchObject([]); @@ -152,11 +150,11 @@ describe('evaluate', function() { }); }); - it('returns error if fallthrough variation is negative', function(done) { + it('returns error if fallthrough variation is negative', done => { var rule = { id: 'id', clauses: [{ attribute: 'key', op: 'in', values: ['other'] }], variation: 99 }; var flag = makeFlagWithRules([rule], { variation: -1 }); var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Invalid variation index in flag')); expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }}); expect(events).toMatchObject([]); @@ -164,11 +162,11 @@ describe('evaluate', function() { }); }); - it('returns error if fallthrough has no variation or rollout', function(done) { + it('returns error if fallthrough has no variation or rollout', done => { var rule = { id: 'id', clauses: [{ attribute: 'key', op: 'in', values: ['other'] }], variation: 99 }; var flag = makeFlagWithRules([rule], { }); var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Variation/rollout object with no variation or rollout')); expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }}); expect(events).toMatchObject([]); @@ -176,11 +174,11 @@ describe('evaluate', function() { }); }); - it('returns error if fallthrough has rollout with no variations', function(done) { + it('returns error if fallthrough has rollout with no variations', done => { var rule 
= { id: 'id', clauses: [{ attribute: 'key', op: 'in', values: ['other'] }], variation: 99 }; var flag = makeFlagWithRules([rule], { rollout: { variations: [] } }); var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Variation/rollout object with no variation or rollout')); expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }}); expect(events).toMatchObject([]); @@ -188,7 +186,7 @@ describe('evaluate', function() { }); }); - it('returns off variation if prerequisite is not found', function(done) { + it('returns off variation if prerequisite is not found', done => { var flag = { key: 'feature0', on: true, @@ -198,7 +196,7 @@ describe('evaluate', function() { variations: ['a', 'b', 'c'] }; var user = { key: 'x' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(detail).toMatchObject({ value: 'b', variationIndex: 1, reason: { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' } }); expect(events).toMatchObject([]); @@ -206,7 +204,7 @@ describe('evaluate', function() { }); }); - it('returns off variation and event if prerequisite is off', function(done) { + it('returns off variation and event if prerequisite is off', done => { var flag = { key: 'feature0', on: true, @@ -229,12 +227,12 @@ describe('evaluate', function() { variations: ['d', 'e'], version: 2 }; - defineFeatures([flag, flag1], function() { + defineFeatures([flag, flag1], () => { var user = { key: 'x' }; var eventsShouldBe = [ { kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0' } ]; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + 
evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(detail).toMatchObject({ value: 'b', variationIndex: 1, reason: { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' } }); expect(events).toMatchObject(eventsShouldBe); @@ -243,7 +241,7 @@ describe('evaluate', function() { }); }); - it('returns off variation and event if prerequisite is not met', function(done) { + it('returns off variation and event if prerequisite is not met', done => { var flag = { key: 'feature0', on: true, @@ -264,12 +262,12 @@ describe('evaluate', function() { variations: ['d', 'e'], version: 2 }; - defineFeatures([flag, flag1], function() { + defineFeatures([flag, flag1], () => { var user = { key: 'x' }; var eventsShouldBe = [ { kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0' } ]; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(detail).toMatchObject({ value: 'b', variationIndex: 1, reason: { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' } }); expect(events).toMatchObject(eventsShouldBe); @@ -278,7 +276,7 @@ describe('evaluate', function() { }); }); - it('returns fallthrough variation and event if prerequisite is met and there are no rules', function(done) { + it('returns fallthrough variation and event if prerequisite is met and there are no rules', done => { var flag = { key: 'feature0', on: true, @@ -299,12 +297,12 @@ describe('evaluate', function() { variations: ['d', 'e'], version: 2 }; - defineFeatures([flag, flag1], function() { + defineFeatures([flag, flag1], () => { var user = { key: 'x' }; var eventsShouldBe = [ { kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0' } ]; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, 
eventFactory, (err, detail, events) => { expect(detail).toMatchObject({ value: 'a', variationIndex: 0, reason: { kind: 'FALLTHROUGH' } }); expect(events).toMatchObject(eventsShouldBe); done(); @@ -312,12 +310,12 @@ describe('evaluate', function() { }); }); - it('matches user from rules', function(done) { + it('matches user from rules', done => { var rule0 = { id: 'id0', clauses: [{ attribute: 'key', op: 'in', values: ['nope'] }], variation: 1 }; var rule1 = { id: 'id1', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 2 }; var flag = makeFlagWithRules([rule0, rule1]); var user = { key: 'userkey' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(detail).toMatchObject({ value: 'c', variationIndex: 2, reason: { kind: 'RULE_MATCH', ruleIndex: 1, ruleId: 'id1' } }); expect(events).toMatchObject([]); @@ -325,11 +323,11 @@ describe('evaluate', function() { }); }); - it('returns error if rule variation is too high', function(done) { + it('returns error if rule variation is too high', done => { var rule = { id: 'id', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 99 }; var flag = makeFlagWithRules([rule]); var user = { key: 'userkey' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Invalid variation index in flag')); expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }}); expect(events).toMatchObject([]); @@ -337,11 +335,11 @@ describe('evaluate', function() { }); }); - it('returns error if rule variation is negative', function(done) { + it('returns error if rule variation is negative', done => { var rule = { id: 'id', clauses: [{ attribute: 'key', op: 'in', 
values: ['userkey'] }], variation: -1 }; var flag = makeFlagWithRules([rule]); var user = { key: 'userkey' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Invalid variation index in flag')); expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }}); expect(events).toMatchObject([]); @@ -349,11 +347,11 @@ describe('evaluate', function() { }); }); - it('returns error if rule has no variation or rollout', function(done) { + it('returns error if rule has no variation or rollout', done => { var rule = { id: 'id', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] }; var flag = makeFlagWithRules([rule]); var user = { key: 'userkey' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Variation/rollout object with no variation or rollout')); expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }}); expect(events).toMatchObject([]); @@ -361,11 +359,11 @@ describe('evaluate', function() { }); }); - it('returns error if rule has rollout with no variations', function(done) { + it('returns error if rule has rollout with no variations', done => { var rule = { id: 'id', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { variations: [] } }; var flag = makeFlagWithRules([rule]); var user = { key: 'userkey' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(err).toEqual(Error('Variation/rollout object with no variation or rollout')); expect(detail).toMatchObject({ value: 
null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }}); expect(events).toMatchObject([]); @@ -373,38 +371,38 @@ describe('evaluate', function() { }); }); - it('coerces user key to string', function(done) { + it('coerces user key to string', done => { var clause = { 'attribute': 'key', 'op': 'in', 'values': [ '999' ] }; var flag = makeBooleanFlagWithOneClause(clause); var user = { 'key': 999 }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(true); done(); }); }); - it('coerces secondary key to string', function(done) { + it('coerces secondary key to string', done => { // We can't really verify that the rollout calculation works correctly, but we can at least // make sure it doesn't error out if there's a non-string secondary value (ch35189) var rule = { id: 'ruleid', clauses: [ - { attribute: 'key', op: 'in', values: [ 'userkey' ] } + { attribute: 'key', op: 'in', values: [ 'userkey' ] } ], rollout: { - salt: '', - variations: [ { weight: 100000, variation: 1 } ] + salt: '', + variations: [ { weight: 100000, variation: 1 } ] } }; var flag = makeBooleanFlagWithRules([ rule ]); var user = { key: 'userkey', secondary: 999 }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(true); done(); }); }); - it('matches user from targets', function(done) { + it('matches user from targets', done => { var flag = { key: 'feature0', on: true, @@ -420,7 +418,7 @@ describe('evaluate', function() { variations: ['a', 'b', 'c'] }; var user = { key: 'userkey' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail, events) => { expect(detail).toMatchObject({ 
value: 'c', variationIndex: 2, reason: { kind: 'TARGET_MATCH' } }); expect(events).toMatchObject([]); done(); @@ -429,102 +427,102 @@ describe('evaluate', function() { function testClauseMatch(clause, user, shouldBe, done) { var flag = makeBooleanFlagWithOneClause(clause); - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail, events) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(shouldBe); done(); }); } - it('can match built-in attribute', function(done) { + it('can match built-in attribute', done => { var user = { key: 'x', name: 'Bob' }; var clause = { attribute: 'name', op: 'in', values: ['Bob'] }; testClauseMatch(clause, user, true, done); }); - it('can match custom attribute', function(done) { + it('can match custom attribute', done => { var user = { key: 'x', name: 'Bob', custom: { legs: 4 } }; var clause = { attribute: 'legs', op: 'in', values: [4] }; testClauseMatch(clause, user, true, done); }); - it('does not match missing attribute', function(done) { + it('does not match missing attribute', done => { var user = { key: 'x', name: 'Bob' }; var clause = { attribute: 'legs', op: 'in', values: [4] }; testClauseMatch(clause, user, false, done); }); - it('can have a negated clause', function(done) { + it('can have a negated clause', done => { var user = { key: 'x', name: 'Bob' }; var clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true }; testClauseMatch(clause, user, false, done); }); - it('matches segment with explicitly included user', function(done) { + it('matches segment with explicitly included user', done => { var segment = { key: 'test', included: [ 'foo' ], version: 1 }; - defineSegment(segment, function() { + defineSegment(segment, () => { var flag = makeFlagWithSegmentMatch(segment); var user = { key: 'foo' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, user, featureStore, 
eventFactory, (err, detail) => { expect(detail.value).toBe(true); done(); }); }); }); - it('does not match segment with explicitly excluded user', function(done) { + it('does not match segment with explicitly excluded user', done => { var segment = { key: 'test', excluded: [ 'foo' ], version: 1 }; - defineSegment(segment, function() { + defineSegment(segment, () => { var flag = makeFlagWithSegmentMatch(segment); var user = { key: 'foo' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(false); done(); }); }); }); - it('does not match segment with unknown user', function(done) { + it('does not match segment with unknown user', done => { var segment = { key: 'test', included: [ 'foo' ], version: 1 }; - defineSegment(segment, function() { + defineSegment(segment, () => { var flag = makeFlagWithSegmentMatch(segment); var user = { key: 'bar' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(false); done(); }); }); }); - it('matches segment with user who is both included and excluded', function(done) { + it('matches segment with user who is both included and excluded', done => { var segment = { key: 'test', included: [ 'foo' ], excluded: [ 'foo' ], version: 1 }; - defineSegment(segment, function() { + defineSegment(segment, () => { var flag = makeFlagWithSegmentMatch(segment); var user = { key: 'foo' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(true); done(); }); }); }); - it('matches segment with rule with full rollout', function(done) { + it('matches segment with rule with full rollout', done => { var segment = { key: 'test', rules: [ @@ -541,17 
+539,17 @@ describe('evaluate', function() { ], version: 1 }; - defineSegment(segment, function() { + defineSegment(segment, () => { var flag = makeFlagWithSegmentMatch(segment); var user = { key: 'foo', email: 'test@example.com' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(true); done(); }); }); }); - it('does not match segment with rule with zero rollout', function(done) { + it('does not match segment with rule with zero rollout', done => { var segment = { key: 'test', rules: [ @@ -568,17 +566,17 @@ describe('evaluate', function() { ], version: 1 }; - defineSegment(segment, function() { + defineSegment(segment, () => { var flag = makeFlagWithSegmentMatch(segment); var user = { key: 'foo', email: 'test@example.com' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(false); done(); }); }); }); - it('matches segment with multiple matching clauses', function(done) { + it('matches segment with multiple matching clauses', done => { var segment = { key: 'test', rules: [ @@ -599,17 +597,17 @@ describe('evaluate', function() { ], version: 1 }; - defineSegment(segment, function() { + defineSegment(segment, () => { var flag = makeFlagWithSegmentMatch(segment); var user = { key: 'foo', email: 'test@example.com', name: 'bob' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(true); done(); }); }); }); - it('does not match segment if one clause does not match', function(done) { + it('does not match segment if one clause does not match', done => { var segment = { key: 'test', rules: [ @@ -630,17 +628,17 @@ describe('evaluate', function() { ], version: 1 }; 
- defineSegment(segment, function() { + defineSegment(segment, () => { var flag = makeFlagWithSegmentMatch(segment); var user = { key: 'foo', email: 'test@example.com', name: 'bob' }; - evaluate.evaluate(flag, user, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, user, featureStore, eventFactory, (err, detail) => { expect(detail.value).toBe(false); done(); }); }); }); - it('does not overflow the call stack when evaluating a huge number of rules', function(done) { + it('does not overflow the call stack when evaluating a huge number of rules', done => { var ruleCount = 5000; var flag = { key: 'flag', @@ -661,14 +659,14 @@ describe('evaluate', function() { rules.push({ clauses: [clause], variation: 1 }); } flag.rules = rules; - evaluate.evaluate(flag, {key: 'user'}, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, {key: 'user'}, featureStore, eventFactory, (err, detail) => { expect(err).toEqual(null); expect(detail.value).toEqual(false); done(); }); }); - it('does not overflow the call stack when evaluating a huge number of clauses', function(done) { + it('does not overflow the call stack when evaluating a huge number of clauses', done => { var clauseCount = 5000; var flag = { key: 'flag', @@ -690,7 +688,7 @@ describe('evaluate', function() { } var rule = { clauses: clauses, variation: 1 }; flag.rules = [rule]; - evaluate.evaluate(flag, {key: 'user'}, featureStore, eventFactory, function(err, detail) { + evaluate.evaluate(flag, {key: 'user'}, featureStore, eventFactory, (err, detail) => { expect(err).toEqual(null); expect(detail.value).toEqual(true); done(); @@ -698,8 +696,8 @@ describe('evaluate', function() { }); }); -describe('bucketUser', function() { - it('gets expected bucket values for specific keys', function() { +describe('bucketUser', () => { + it('gets expected bucket values for specific keys', () => { var user = { key: 'userKeyA' }; var bucket = evaluate.bucketUser(user, 'hashKey', 'key', 
'saltyA'); expect(bucket).toBeCloseTo(0.42157587, 7); @@ -713,7 +711,7 @@ describe('bucketUser', function() { expect(bucket).toBeCloseTo(0.10343106, 7); }); - it('can bucket by int value (equivalent to string)', function() { + it('can bucket by int value (equivalent to string)', () => { var user = { key: 'userKey', custom: { @@ -727,7 +725,7 @@ describe('bucketUser', function() { expect(bucket2).toBe(bucket); }); - it('cannot bucket by float value', function() { + it('cannot bucket by float value', () => { var user = { key: 'userKey', custom: { diff --git a/test/event_processor-test.js b/test/event_processor-test.js index 9c8e4d8..4978ccc 100644 --- a/test/event_processor-test.js +++ b/test/event_processor-test.js @@ -1,12 +1,12 @@ -var nock = require('nock'); -var EventProcessor = require('../event_processor'); +const nock = require('nock'); +const EventProcessor = require('../event_processor'); -describe('EventProcessor', function() { +describe('EventProcessor', () => { - var ep; - var eventsUri = 'http://example.com'; - var sdkKey = 'SDK_KEY'; - var defaultConfig = { + let ep; + const eventsUri = 'http://example.com'; + const sdkKey = 'SDK_KEY'; + const defaultConfig = { eventsUri: eventsUri, capacity: 100, flushInterval: 30, @@ -17,14 +17,14 @@ describe('EventProcessor', function() { warn: jest.fn() } }; - var user = { key: 'userKey', name: 'Red' }; - var filteredUser = { key: 'userKey', privateAttrs: [ 'name' ] }; - var numericUser = { key: 1, secondary: 2, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, + const user = { key: 'userKey', name: 'Red' }; + const filteredUser = { key: 'userKey', privateAttrs: [ 'name' ] }; + const numericUser = { key: 1, secondary: 2, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, avatar: 8, name: 9, anonymous: false, custom: { age: 99 } }; - var stringifiedNumericUser = { key: '1', secondary: '2', ip: '3', country: '4', email: '5', firstName: '6', + const stringifiedNumericUser = { key: '1', secondary: '2', ip: 
'3', country: '4', email: '5', firstName: '6', lastName: '7', avatar: '8', name: '9', anonymous: false, custom: { age: 99 } }; - afterEach(function() { + afterEach(() => { if (ep) { ep.close(); } @@ -32,10 +32,10 @@ describe('EventProcessor', function() { }); function flushAndGetRequest(options, cb) { - var callback = cb || options; + const callback = cb || options; options = cb ? options : {}; - var requestBody; - var requestHeaders; + let requestBody; + let requestHeaders; nock(eventsUri).post('/bulk') .reply(function(uri, body) { requestBody = body; @@ -43,10 +43,10 @@ describe('EventProcessor', function() { return [ options.status || 200, '', options.headers || {} ]; }); ep.flush().then( - function() { + () => { callback(requestBody, requestHeaders); }, - function(error) { + error => { callback(requestBody, requestHeaders, error); }); } @@ -94,12 +94,12 @@ describe('EventProcessor', function() { expect(e.kind).toEqual('summary'); } - it('queues identify event', function(done) { + it('queues identify event', done => { ep = EventProcessor(sdkKey, defaultConfig); - var e = { kind: 'identify', creationDate: 1000, user: user }; + const e = { kind: 'identify', creationDate: 1000, user: user }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output).toEqual([{ kind: 'identify', creationDate: 1000, @@ -110,13 +110,13 @@ describe('EventProcessor', function() { }); }); - it('filters user in identify event', function(done) { - var config = Object.assign({}, defaultConfig, { allAttributesPrivate: true }); + it('filters user in identify event', done => { + const config = Object.assign({}, defaultConfig, { allAttributesPrivate: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'identify', creationDate: 1000, user: user }; + const e = { kind: 'identify', creationDate: 1000, user: user }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output).toEqual([{ kind: 
'identify', creationDate: 1000, @@ -127,12 +127,12 @@ describe('EventProcessor', function() { }); }); - it('stringifies user attributes in identify event', function(done) { + it('stringifies user attributes in identify event', done => { ep = EventProcessor(sdkKey, defaultConfig); - var e = { kind: 'identify', creationDate: 1000, user: numericUser }; + const e = { kind: 'identify', creationDate: 1000, user: numericUser }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output).toEqual([{ kind: 'identify', creationDate: 1000, @@ -143,13 +143,13 @@ describe('EventProcessor', function() { }); }); - it('queues individual feature event with index event', function(done) { + it('queues individual feature event with index event', done => { ep = EventProcessor(sdkKey, defaultConfig); - var e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: true }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(3); checkIndexEvent(output[0], e, user); checkFeatureEvent(output[1], e, false); @@ -158,14 +158,14 @@ describe('EventProcessor', function() { }); }); - it('filters user in index event', function(done) { - var config = Object.assign({}, defaultConfig, { allAttributesPrivate: true }); + it('filters user in index event', done => { + const config = Object.assign({}, defaultConfig, { allAttributesPrivate: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: true }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(3); checkIndexEvent(output[0], e, 
filteredUser); checkFeatureEvent(output[1], e, false); @@ -174,13 +174,13 @@ describe('EventProcessor', function() { }); }); - it('stringifies user attributes in index event', function(done) { + it('stringifies user attributes in index event', done => { ep = EventProcessor(sdkKey, defaultConfig); - var e = { kind: 'feature', creationDate: 1000, user: numericUser, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: numericUser, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: true }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(3); checkIndexEvent(output[0], e, stringifiedNumericUser); checkFeatureEvent(output[1], e, false); @@ -189,14 +189,14 @@ describe('EventProcessor', function() { }); }); - it('can include inline user in feature event', function(done) { - var config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); + it('can include inline user in feature event', done => { + const config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: true }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(2); checkFeatureEvent(output[0], e, false, user); checkSummaryEvent(output[1]); @@ -204,15 +204,15 @@ describe('EventProcessor', function() { }); }); - it('filters user in feature event', function(done) { - var config = Object.assign({}, defaultConfig, { allAttributesPrivate: true, + it('filters user in feature event', done => { + const config = Object.assign({}, defaultConfig, { allAttributesPrivate: true, inlineUsersInEvents: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'feature', 
creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: true }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(2); checkFeatureEvent(output[0], e, false, filteredUser); checkSummaryEvent(output[1]); @@ -220,14 +220,14 @@ describe('EventProcessor', function() { }); }); - it('stringifies user attributes in feature event', function(done) { - var config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); + it('stringifies user attributes in feature event', done => { + const config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'feature', creationDate: 1000, user: numericUser, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: numericUser, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: true }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(2); checkFeatureEvent(output[0], e, false, stringifiedNumericUser); checkSummaryEvent(output[1]); @@ -235,15 +235,15 @@ describe('EventProcessor', function() { }); }); - it('can include reason in feature event', function(done) { - var config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); + it('can include reason in feature event', done => { + const config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: true, reason: { kind: 'FALLTHROUGH' } }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { 
expect(output.length).toEqual(2); checkFeatureEvent(output[0], e, false, user); checkSummaryEvent(output[1]); @@ -251,14 +251,14 @@ describe('EventProcessor', function() { }); }); - it('still generates index event if inlineUsers is true but feature event is not tracked', function(done) { - var config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); + it('still generates index event if inlineUsers is true but feature event is not tracked', done => { + const config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: false }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(2); checkIndexEvent(output[0], e, user); checkSummaryEvent(output[1]); @@ -266,14 +266,14 @@ describe('EventProcessor', function() { }); }); - it('sets event kind to debug if event is temporarily in debug mode', function(done) { + it('sets event kind to debug if event is temporarily in debug mode', done => { ep = EventProcessor(sdkKey, defaultConfig); var futureTime = new Date().getTime() + 1000000; - var e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: false, debugEventsUntilDate: futureTime }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(3); checkIndexEvent(output[0], e, user); checkFeatureEvent(output[1], e, true, user); @@ -282,14 +282,14 @@ describe('EventProcessor', function() { }); }); - it('can both track and debug an event', function(done) { + it('can both track and debug an event', done => { ep = 
EventProcessor(sdkKey, defaultConfig); var futureTime = new Date().getTime() + 1000000; - var e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: true, debugEventsUntilDate: futureTime }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(4); checkIndexEvent(output[0], e, user); checkFeatureEvent(output[1], e, false); @@ -299,7 +299,7 @@ describe('EventProcessor', function() { }); }); - it('expires debug mode based on client time if client time is later than server time', function(done) { + it('expires debug mode based on client time if client time is later than server time', done => { ep = EventProcessor(sdkKey, defaultConfig); // Pick a server time that is somewhat behind the client time @@ -311,12 +311,12 @@ describe('EventProcessor', function() { // Now send an event with debug mode on, with a "debug until" time that is further in // the future than the server time, but in the past compared to the client. 
var debugUntil = serverTime + 1000; - var e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: false, debugEventsUntilDate: debugUntil }; ep.sendEvent(e); // Should get a summary event only, not a full feature event - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(2); checkIndexEvent(output[0], e, user); checkSummaryEvent(output[1]); @@ -325,7 +325,7 @@ describe('EventProcessor', function() { }); }); - it('expires debug mode based on server time if server time is later than client time', function(done) { + it('expires debug mode based on server time if server time is later than client time', done => { ep = EventProcessor(sdkKey, defaultConfig); // Pick a server time that is somewhat ahead of the client time @@ -337,12 +337,12 @@ describe('EventProcessor', function() { // Now send an event with debug mode on, with a "debug until" time that is further in // the future than the client time, but in the past compared to the server. 
var debugUntil = serverTime - 1000; - var e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', + const e = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey', version: 11, variation: 1, value: 'value', trackEvents: false, debugEventsUntilDate: debugUntil }; ep.sendEvent(e); // Should get a summary event only, not a full feature event - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(2); checkIndexEvent(output[0], e, user); checkSummaryEvent(output[1]); @@ -351,7 +351,7 @@ describe('EventProcessor', function() { }); }); - it('generates only one index event from two feature events for same user', function(done) { + it('generates only one index event from two feature events for same user', done => { ep = EventProcessor(sdkKey, defaultConfig); var e1 = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey1', version: 11, variation: 1, value: 'value', trackEvents: true }; @@ -360,7 +360,7 @@ describe('EventProcessor', function() { ep.sendEvent(e1); ep.sendEvent(e2); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(4); checkIndexEvent(output[0], e1, user); checkFeatureEvent(output[1], e1, false); @@ -370,7 +370,7 @@ describe('EventProcessor', function() { }); }); - it('summarizes nontracked events', function(done) { + it('summarizes nontracked events', done => { ep = EventProcessor(sdkKey, defaultConfig); var e1 = { kind: 'feature', creationDate: 1000, user: user, key: 'flagkey1', version: 11, variation: 1, value: 'value1', default: 'default1', trackEvents: false }; @@ -379,7 +379,7 @@ describe('EventProcessor', function() { ep.sendEvent(e1); ep.sendEvent(e2); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(2); checkIndexEvent(output[0], e1, user); var se = output[1]; @@ -400,13 +400,13 @@ describe('EventProcessor', function() { }); }); - it('queues custom event with 
user', function(done) { + it('queues custom event with user', done => { ep = EventProcessor(sdkKey, defaultConfig); - var e = { kind: 'custom', creationDate: 1000, user: user, key: 'eventkey', + const e = { kind: 'custom', creationDate: 1000, user: user, key: 'eventkey', data: { thing: 'stuff' } }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(2); checkIndexEvent(output[0], e, user); checkCustomEvent(output[1], e); @@ -414,13 +414,13 @@ describe('EventProcessor', function() { }); }); - it('can include metric value in custom event', function(done) { + it('can include metric value in custom event', done => { ep = EventProcessor(sdkKey, defaultConfig); - var e = { kind: 'custom', creationDate: 1000, user: user, key: 'eventkey', + const e = { kind: 'custom', creationDate: 1000, user: user, key: 'eventkey', data: { thing: 'stuff' }, metricValue: 1.5 }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(2); checkIndexEvent(output[0], e, user); checkCustomEvent(output[1], e); @@ -428,50 +428,50 @@ describe('EventProcessor', function() { }); }); - it('can include inline user in custom event', function(done) { - var config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); + it('can include inline user in custom event', done => { + const config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'custom', creationDate: 1000, user: user, key: 'eventkey', + const e = { kind: 'custom', creationDate: 1000, user: user, key: 'eventkey', data: { thing: 'stuff' } }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(1); checkCustomEvent(output[0], e, user); done(); }); }); - it('stringifies user attributes in custom event', function(done) { - var config = Object.assign({}, defaultConfig, { 
inlineUsersInEvents: true }); + it('stringifies user attributes in custom event', done => { + const config = Object.assign({}, defaultConfig, { inlineUsersInEvents: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'custom', creationDate: 1000, user: numericUser, key: 'eventkey', + const e = { kind: 'custom', creationDate: 1000, user: numericUser, key: 'eventkey', data: { thing: 'stuff' } }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(1); checkCustomEvent(output[0], e, stringifiedNumericUser); done(); }); }); - it('filters user in custom event', function(done) { - var config = Object.assign({}, defaultConfig, { allAttributesPrivate: true, + it('filters user in custom event', done => { + const config = Object.assign({}, defaultConfig, { allAttributesPrivate: true, inlineUsersInEvents: true }); ep = EventProcessor(sdkKey, config); - var e = { kind: 'custom', creationDate: 1000, user: user, key: 'eventkey', + const e = { kind: 'custom', creationDate: 1000, user: user, key: 'eventkey', data: { thing: 'stuff' } }; ep.sendEvent(e); - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(1); checkCustomEvent(output[0], e, filteredUser); done(); }); }); - it('sends nothing if there are no events', function(done) { + it('sends nothing if there are no events', done => { ep = EventProcessor(sdkKey, defaultConfig); ep.flush(function() { // Nock will generate an error if we sent a request we didn't explicitly listen for. 
@@ -479,9 +479,9 @@ describe('EventProcessor', function() { }); }); - it('sends SDK key', function(done) { + it('sends SDK key', done => { ep = EventProcessor(sdkKey, defaultConfig); - var e = { kind: 'identify', creationDate: 1000, user: user }; + const e = { kind: 'identify', creationDate: 1000, user: user }; ep.sendEvent(e); flushAndGetRequest(function(requestBody, requestHeaders) { @@ -492,11 +492,11 @@ describe('EventProcessor', function() { function verifyUnrecoverableHttpError(done, status) { ep = EventProcessor(sdkKey, defaultConfig); - var e = { kind: 'identify', creationDate: 1000, user: user }; + const e = { kind: 'identify', creationDate: 1000, user: user }; ep.sendEvent(e); - flushAndGetRequest({ status: status }, function(body, headers, error) { - expect(error.message).toContain("error " + status); + flushAndGetRequest({ status: status }, (body, headers, error) => { + expect(error.message).toContain('error ' + status); ep.sendEvent(e); @@ -504,7 +504,7 @@ describe('EventProcessor', function() { // no HTTP request should have been done here - Nock will error out if there was one function() { }, function(err) { - expect(err.message).toContain("SDK key is invalid"); + expect(err.message).toContain('SDK key is invalid'); done(); }); }); @@ -527,7 +527,7 @@ describe('EventProcessor', function() { ep.sendEvent(e1); // this second event should go through - flushAndGetRequest(function(output) { + flushAndGetRequest(output => { expect(output.length).toEqual(1); expect(output[0].creationDate).toEqual(1001); @@ -536,35 +536,35 @@ describe('EventProcessor', function() { }); } - it('retries after a 400 error', function(done) { + it('retries after a 400 error', done => { verifyRecoverableHttpError(done, 400); }); - it('stops sending events after a 401 error', function(done) { + it('stops sending events after a 401 error', done => { verifyUnrecoverableHttpError(done, 401); }); - it('stops sending events after a 403 error', function(done) { + it('stops sending events 
after a 403 error', done => { verifyUnrecoverableHttpError(done, 403); }); - it('retries after a 408 error', function(done) { + it('retries after a 408 error', done => { verifyRecoverableHttpError(done, 408); }); - it('retries after a 429 error', function(done) { + it('retries after a 429 error', done => { verifyRecoverableHttpError(done, 429); }); - it('retries after a 503 error', function(done) { + it('retries after a 503 error', done => { verifyRecoverableHttpError(done, 503); }); - it('swallows errors from failed background flush', function(done) { + it('swallows errors from failed background flush', done => { // This test verifies that when a background flush fails, we don't emit an unhandled // promise rejection. Jest will fail the test if we do that. - var config = Object.assign({}, defaultConfig, { flushInterval: 0.25 }); + const config = Object.assign({}, defaultConfig, { flushInterval: 0.25 }); ep = EventProcessor(sdkKey, config); ep.sendEvent({ kind: 'identify', creationDate: 1000, user: user }); @@ -573,10 +573,10 @@ describe('EventProcessor', function() { // unfortunately we must wait for both the flush interval and the 1-second retry interval var delay = 1500; - setTimeout(function() { - expect(req1.isDone()).toEqual(true); - expect(req2.isDone()).toEqual(true); - done(); - }, delay); + setTimeout(() => { + expect(req1.isDone()).toEqual(true); + expect(req2.isDone()).toEqual(true); + done(); + }, delay); }); }); diff --git a/test/feature_store-test.js b/test/feature_store-test.js index 71ab1db..30dea69 100644 --- a/test/feature_store-test.js +++ b/test/feature_store-test.js @@ -1,8 +1,8 @@ -var InMemoryFeatureStore = require('../feature_store'); -var testBase = require('./feature_store_test_base'); +const InMemoryFeatureStore = require('../feature_store'); +const testBase = require('./feature_store_test_base'); -describe('InMemoryFeatureStore', function() { - testBase.baseFeatureStoreTests(function() { +describe('InMemoryFeatureStore', () => { + 
testBase.baseFeatureStoreTests(() => { return new InMemoryFeatureStore(); - }) + }); }); diff --git a/test/feature_store_event_wrapper-test.js b/test/feature_store_event_wrapper-test.js new file mode 100644 index 0000000..da9c9c1 --- /dev/null +++ b/test/feature_store_event_wrapper-test.js @@ -0,0 +1,182 @@ +const EventEmitter = require('events').EventEmitter; +const FeatureStoreEventWrapper = require('../feature_store_event_wrapper'); +const InMemoryFeatureStore = require('../feature_store'); +const dataKind = require('../versioned_data_kind'); +const { AsyncQueue, asyncify } = require('./async_utils'); + +describe('FeatureStoreEventWrapper', () => { + function listenAndStoreEvents(emitter, queue, eventName) { + emitter.on(eventName, arg => { + queue.add([eventName, arg]); + }); + } + + it('sends events for init of empty store', async () => { + const store = InMemoryFeatureStore(); + const allData = { + features: { + a: { key: 'a', version: 1 }, + b: { key: 'b', version: 1 } + }, + segments: {} + }; + const emitter = new EventEmitter(); + const queue = AsyncQueue(); + listenAndStoreEvents(emitter, queue, 'update'); + listenAndStoreEvents(emitter, queue, 'update:a'); + listenAndStoreEvents(emitter, queue, 'update:b'); + + const wrapper = FeatureStoreEventWrapper(store, emitter); + + await asyncify(f => wrapper.init(allData, f)); + + expect(await queue.take()).toEqual(['update', { key: 'a' }]); + expect(await queue.take()).toEqual(['update:a', { key: 'a' }]); + expect(await queue.take()).toEqual(['update', { key: 'b' }]); + expect(await queue.take()).toEqual(['update:b', { key: 'b' }]); + expect(queue.isEmpty()).toEqual(true); + }); + + it('sends events for reinit of non-empty store', async () => { + const store = InMemoryFeatureStore(); + const allData0 = { + features: { + a: { key: 'a', version: 1 }, + b: { key: 'b', version: 1 }, + c: { key: 'c', version: 1 } + }, + segments: {} + }; + const allData1 = { + features: { + a: { key: 'a', version: 1 }, + b: { key: 
'b', version: 2 } + }, + segments: {} + }; + const emitter = new EventEmitter(); + const queue = AsyncQueue(); + listenAndStoreEvents(emitter, queue, 'update'); + listenAndStoreEvents(emitter, queue, 'update:a'); + listenAndStoreEvents(emitter, queue, 'update:b'); + listenAndStoreEvents(emitter, queue, 'update:c'); + + const wrapper = FeatureStoreEventWrapper(store, emitter); + + await asyncify(f => wrapper.init(allData0, f)); + + expect(await queue.take()).toEqual(['update', { key: 'a' }]); + expect(await queue.take()).toEqual(['update:a', { key: 'a' }]); + expect(await queue.take()).toEqual(['update', { key: 'b' }]); + expect(await queue.take()).toEqual(['update:b', { key: 'b' }]); + expect(await queue.take()).toEqual(['update', { key: 'c' }]); + expect(await queue.take()).toEqual(['update:c', { key: 'c' }]); + expect(queue.isEmpty()).toEqual(true); + + await asyncify(f => wrapper.init(allData1, f)); + expect(await queue.take()).toEqual(['update', { key: 'b' }]); // b was updated to version 2 + expect(await queue.take()).toEqual(['update:b', { key: 'b' }]); + expect(await queue.take()).toEqual(['update', { key: 'c' }]); // c was deleted + expect(await queue.take()).toEqual(['update:c', { key: 'c' }]); + expect(queue.isEmpty()).toEqual(true); + }); + + it('sends event for update', async () => { + const store = InMemoryFeatureStore(); + const allData = { + features: { + a: { key: 'a', version: 1 } + }, + segments: {} + }; + const emitter = new EventEmitter(); + const queue = AsyncQueue(); + listenAndStoreEvents(emitter, queue, 'update'); + listenAndStoreEvents(emitter, queue, 'update:a'); + + const wrapper = FeatureStoreEventWrapper(store, emitter); + + await asyncify(f => wrapper.init(allData, f)); + + expect(await queue.take()).toEqual(['update', { key: 'a' }]); + expect(await queue.take()).toEqual(['update:a', { key: 'a' }]); + expect(queue.isEmpty()).toEqual(true); + + await asyncify(f => wrapper.upsert(dataKind.features, { key: 'a', version: 2 }, f)); + await 
asyncify(f => wrapper.upsert(dataKind.features, { key: 'a', version: 2 }, f)); // no event for this one + expect(await queue.take()).toEqual(['update', { key: 'a' }]); + expect(await queue.take()).toEqual(['update:a', { key: 'a' }]); + expect(queue.isEmpty()).toEqual(true); + }); + + it('sends event for delete', async () => { + const store = InMemoryFeatureStore(); + const allData = { + features: { + a: { key: 'a', version: 1 } + }, + segments: {} + }; + const emitter = new EventEmitter(); + const queue = AsyncQueue(); + listenAndStoreEvents(emitter, queue, 'update'); + listenAndStoreEvents(emitter, queue, 'update:a'); + + const wrapper = FeatureStoreEventWrapper(store, emitter); + + await asyncify(f => wrapper.init(allData, f)); + + expect(await queue.take()).toEqual(['update', { key: 'a' }]); + expect(await queue.take()).toEqual(['update:a', { key: 'a' }]); + expect(queue.isEmpty()).toEqual(true); + + await asyncify(f => wrapper.delete(dataKind.features, 'a', 2, f)); + expect(await queue.take()).toEqual(['update', { key: 'a' }]); + expect(await queue.take()).toEqual(['update:a', { key: 'a' }]); + expect(queue.isEmpty()).toEqual(true); + }); + + it('sends update events for transitive dependencies', async () => { + const store = InMemoryFeatureStore(); + const allData = { + features: { + a: { key: 'a', version: 1 }, + b: { key: 'b', version: 1, prerequisites: [ { key: 'c' }, { key: 'e' } ] }, + c: { key: 'c', version: 1, prerequisites: [ { key: 'd' } ], + rules: [ + { clauses: [ { op: 'segmentMatch', values: [ 's0' ] } ] } + ] + }, + d: { key: 'd', version: 1, prerequisites: [ { key: 'e' } ] }, + e: { key: 'e', version: 1 } + }, + segments: { + s0: { key: 's0', version: 1 } + } + }; + const emitter = new EventEmitter(); + const queue = AsyncQueue(); + listenAndStoreEvents(emitter, queue, 'update'); + + const wrapper = FeatureStoreEventWrapper(store, emitter); + + await asyncify(f => wrapper.init(allData, f)); + + expect(await queue.take()).toEqual(['update', { key: 
'a' }]); + expect(await queue.take()).toEqual(['update', { key: 'b' }]); + expect(await queue.take()).toEqual(['update', { key: 'c' }]); + expect(await queue.take()).toEqual(['update', { key: 'd' }]); + expect(await queue.take()).toEqual(['update', { key: 'e' }]); + expect(queue.isEmpty()).toEqual(true); + + await asyncify(f => wrapper.upsert(dataKind.features, + { key: 'd', version: 2, prerequisites: [ { key: 'e' } ] }, f)); + expect(await queue.take()).toEqual(['update', { key: 'b' }]); + expect(await queue.take()).toEqual(['update', { key: 'c' }]); + expect(await queue.take()).toEqual(['update', { key: 'd' }]); + + await asyncify(f => wrapper.upsert(dataKind.segments, { key: 's0', version: 2 }, f)); + expect(await queue.take()).toEqual(['update', { key: 'b' }]); + expect(await queue.take()).toEqual(['update', { key: 'c' }]); + }); +}); diff --git a/test/operators-test.js b/test/operators-test.js index 589912d..1767b40 100644 --- a/test/operators-test.js +++ b/test/operators-test.js @@ -2,97 +2,100 @@ var assert = require('assert'); var operators = require('../operators'); describe('operators', function() { - var paramsTable = [ - // numeric comparisons - [ "in", 99, 99, true ], - [ "in", 99.0001, 99.0001, true ], - [ "in", 99, 99.0001, false ], - [ "in", 99.0001, 99, false ], - [ "lessThan", 99, 99.0001, true ], - [ "lessThan", 99.0001, 99, false ], - [ "lessThan", 99, 99, false ], - [ "lessThanOrEqual", 99, 99.0001, true ], - [ "lessThanOrEqual", 99.0001, 99, false ], - [ "lessThanOrEqual", 99, 99, true ], - [ "greaterThan", 99.0001, 99, true ], - [ "greaterThan", 99, 99.0001, false ], - [ "greaterThan", 99, 99, false ], - [ "greaterThanOrEqual", 99.0001, 99, true ], - [ "greaterThanOrEqual", 99, 99.0001, false ], - [ "greaterThanOrEqual", 99, 99, true ], + const paramsTable = [ + // numeric comparisons + [ 'in', 99, 99, true ], + [ 'in', 99.0001, 99.0001, true ], + [ 'in', 99, 99.0001, false ], + [ 'in', 99.0001, 99, false ], + [ 'lessThan', 99, 99.0001, true 
], + [ 'lessThan', 99.0001, 99, false ], + [ 'lessThan', 99, 99, false ], + [ 'lessThanOrEqual', 99, 99.0001, true ], + [ 'lessThanOrEqual', 99.0001, 99, false ], + [ 'lessThanOrEqual', 99, 99, true ], + [ 'greaterThan', 99.0001, 99, true ], + [ 'greaterThan', 99, 99.0001, false ], + [ 'greaterThan', 99, 99, false ], + [ 'greaterThanOrEqual', 99.0001, 99, true ], + [ 'greaterThanOrEqual', 99, 99.0001, false ], + [ 'greaterThanOrEqual', 99, 99, true ], - // string comparisons - [ "in", "x", "x", true ], - [ "in", "x", "xyz", false ], - [ "startsWith", "xyz", "x", true ], - [ "startsWith", "x", "xyz", false ], - [ "endsWith", "xyz", "z", true ], - [ "endsWith", "z", "xyz", false ], - [ "contains", "xyz", "y", true ], - [ "contains", "y", "xyz", false ], + // string comparisons + [ 'in', 'x', 'x', true ], + [ 'in', 'x', 'xyz', false ], + [ 'startsWith', 'xyz', 'x', true ], + [ 'startsWith', 'x', 'xyz', false ], + [ 'endsWith', 'xyz', 'z', true ], + [ 'endsWith', 'z', 'xyz', false ], + [ 'contains', 'xyz', 'y', true ], + [ 'contains', 'y', 'xyz', false ], - // mixed strings and numbers - [ "in", "99", 99, false ], - [ "in", 99, "99", false ], - // [ "contains", "99", 99, false ], // currently returns true - inconsistent with other SDKs - // [ "startsWith", "99", 99, false ], // currently returns true - inconsistent with other SDKs - // [ "endsWith", "99", 99, false ], // currently returns true - inconsistent with other SDKs - [ "lessThanOrEqual", "99", 99, false ], - // [ "lessThanOrEqual", 99, "99", false ], // currently returns true - inconsistent with other SDKs - [ "greaterThanOrEqual", "99", 99, false ], - // [ "greaterThanOrEqual", 99, "99", false ], // currently returns true - inconsistent with other SDKs + // mixed strings and numbers + [ 'in', '99', 99, false ], + [ 'in', 99, '99', false ], + [ 'contains', '99', 99, false ], + [ 'startsWith', '99', 99, false ], + [ 'endsWith', '99', 99, false ], + [ 'lessThanOrEqual', '99', 99, false ], + [ 'lessThanOrEqual', 
99, '99', false ], + [ 'greaterThanOrEqual', '99', 99, false ], + [ 'greaterThanOrEqual', 99, '99', false ], - // regex - [ "matches", "hello world", "hello.*rld", true ], - [ "matches", "hello world", "hello.*rl", true ], - [ "matches", "hello world", "l+", true ], - [ "matches", "hello world", "(world|planet)", true ], - [ "matches", "hello world", "aloha", false ], - // [ "matches", "hello world", "***not a regex", false ], // currently throws an exception + // regex + [ 'matches', 'hello world', 'hello.*rld', true ], + [ 'matches', 'hello world', 'hello.*rl', true ], + [ 'matches', 'hello world', 'l+', true ], + [ 'matches', 'hello world', '(world|planet)', true ], + [ 'matches', 'hello world', 'aloha', false ], + [ 'matches', 'hello world', '***not a regex', false ], + [ 'matches', 'hello world', 3, false ], + [ 'matches', 3, 'hello', false ], - // dates - [ "before", 0, 1, true ], - [ "before", -100, 0, true ], - [ "before", "1970-01-01T00:00:00Z", 1000, true ], - [ "before", "1970-01-01T00:00:00.500Z", 1000, true ], - [ "before", true, 1000, false ], // wrong type - [ "after", "1970-01-01T00:00:02.500Z", 1000, true ], - // [ "after", "1970-01-01 00:00:02.500Z", 1000, false ], // malformed timestamp - but is currently parsed as valid - [ "before", "1970-01-01T00:00:02+01:00", 1000, true ], - [ "before", -1000, 1000, true ], - [ "after", "1970-01-01T00:00:01.001Z", 1000, true ], - [ "after", "1970-01-01T00:00:00-01:00", 1000, true ], + // dates + [ 'before', 0, 1, true ], + [ 'before', -100, 0, true ], + [ 'before', '1970-01-01T00:00:00Z', 1000, true ], + [ 'before', '1970-01-01T00:00:00.500Z', 1000, true ], + [ 'before', true, 1000, false ], // wrong type + [ 'after', '1970-01-01T00:00:02.500Z', 1000, true ], + [ 'after', '1970-01-01 00:00:02.500Z', 1000, false ], // malformed timestamp + [ 'before', '1970-01-01T00:00:02+01:00', 1000, true ], + [ 'before', -1000, 1000, true ], + [ 'after', '1970-01-01T00:00:01.001Z', 1000, true ], + [ 'after', 
'1970-01-01T00:00:00-01:00', 1000, true ], - // semver - [ "semVerEqual", "2.0.1", "2.0.1", true ], - [ "semVerEqual", "2.0.1", "02.0.1", false ], // leading zeroes should be disallowed - [ "semVerEqual", "2.0", "2.0.0", true ], - [ "semVerEqual", "2", "2.0.0", true ], - [ "semVerEqual", "2-rc1", "2.0.0-rc1", true ], - [ "semVerEqual", "2+build2", "2.0.0+build2", true ], - [ "semVerEqual", "2.0.0", "2.0.0+build2", true ], // build metadata should be ignored in comparison - [ "semVerEqual", "2.0.0", "2.0.0-rc1", false ], // prerelease should not be ignored - [ "semVerEqual", "2.0.0", "2.0.0+build_2", false ], // enforce allowable character set in build metadata - [ "semVerEqual", "2.0.0", "v2.0.0", false ], // disallow leading "v" - [ "semVerLessThan", "2.0.0", "2.0.1", true ], - [ "semVerLessThan", "2.0", "2.0.1", true ], - [ "semVerLessThan", "2.0.1", "2.0.0", false ], - [ "semVerLessThan", "2.0.1", "2.0", false ], - [ "semVerLessThan", "2.0.0-rc", "2.0.0-rc.beta", true ], - [ "semVerLessThan", "2.0.0-rc", "2.0.0", true ], - [ "semVerLessThan", "2.0.0-rc.3", "2.0.0-rc.29", true ], - [ "semVerLessThan", "2.0.0-rc.x29", "2.0.0-rc.x3", true ], - [ "semVerGreaterThan", "2.0.1", "2.0.0", true ], - [ "semVerGreaterThan", "2.0.1", "2.0", true ], - [ "semVerGreaterThan", "2.0.0", "2.0.1", false ], - [ "semVerGreaterThan", "2.0", "2.0.1", false ], - [ "semVerGreaterThan", "2.0.0-rc.1", "2.0.0-rc.0", true ], - [ "semVerLessThan", "2.0.1", "xbad%ver", false ], - [ "semVerGreaterThan", "2.0.1", "xbad%ver", false ] - ]; + // semver + [ 'semVerEqual', '2.0.1', '2.0.1', true ], + [ 'semVerEqual', '2.0.1', '02.0.1', false ], // leading zeroes should be disallowed + [ 'semVerEqual', '2.0', '2.0.0', true ], + [ 'semVerEqual', '2', '2.0.0', true ], + [ 'semVerEqual', '2-rc1', '2.0.0-rc1', true ], + [ 'semVerEqual', '2+build2', '2.0.0+build2', true ], + [ 'semVerEqual', '2.0.0', '2.0.0+build2', true ], // build metadata should be ignored in comparison + [ 'semVerEqual', '2.0.0', 
'2.0.0-rc1', false ], // prerelease should not be ignored + [ 'semVerEqual', '2.0.0', '2.0.0+build_2', false ], // enforce allowable character set in build metadata + [ 'semVerEqual', '2.0.0', 'v2.0.0', false ], // disallow leading 'v' + [ 'semVerLessThan', '2.0.0', '2.0.1', true ], + [ 'semVerLessThan', '2.0', '2.0.1', true ], + [ 'semVerLessThan', '2.0.1', '2.0.0', false ], + [ 'semVerLessThan', '2.0.1', '2.0', false ], + [ 'semVerLessThan', '2.0.0-rc', '2.0.0-rc.beta', true ], + [ 'semVerLessThan', '2.0.0-rc', '2.0.0', true ], + [ 'semVerLessThan', '2.0.0-rc.3', '2.0.0-rc.29', true ], + [ 'semVerLessThan', '2.0.0-rc.x29', '2.0.0-rc.x3', true ], + [ 'semVerGreaterThan', '2.0.1', '2.0.0', true ], + [ 'semVerGreaterThan', '2.0.1', '2.0', true ], + [ 'semVerGreaterThan', '2.0.0', '2.0.1', false ], + [ 'semVerGreaterThan', '2.0', '2.0.1', false ], + [ 'semVerGreaterThan', '2.0.0-rc.1', '2.0.0-rc.0', true ], + [ 'semVerLessThan', '2.0.1', 'xbad%ver', false ], + [ 'semVerGreaterThan', '2.0.1', 'xbad%ver', false ] + ]; paramsTable.forEach(function(params) { - it('result is ' + params[3] + ' for ' + params[1] + ' ' + params[0] + ' ' + params[2], function() { + it('result is ' + params[3] + ' for ' + JSON.stringify(params[1]) + ' ' + params[0] + ' ' + + JSON.stringify(params[2]), function() { assert.equal(operators.fn(params[0])(params[1], params[2]), params[3]); }); }); diff --git a/test/polling-test.js b/test/polling-test.js index c918130..56f4c7d 100644 --- a/test/polling-test.js +++ b/test/polling-test.js @@ -2,6 +2,7 @@ const InMemoryFeatureStore = require('../feature_store'); const PollingProcessor = require('../polling'); const dataKind = require('../versioned_data_kind'); const { asyncify, asyncifyNode, sleepAsync } = require('./async_utils'); +const stubs = require('./stubs'); describe('PollingProcessor', () => { const longInterval = 100000; @@ -14,20 +15,13 @@ describe('PollingProcessor', () => { beforeEach(() => { store = InMemoryFeatureStore(); - config = { 
featureStore: store, pollInterval: longInterval, logger: fakeLogger() }; + config = { featureStore: store, pollInterval: longInterval, logger: stubs.stubLogger() }; }); afterEach(() => { processor && processor.stop(); }); - function fakeLogger() { - return { - debug: jest.fn(), - error: jest.fn() - }; - } - it('makes no request before start', () => { const requestor = { requestAllData: jest.fn() @@ -57,16 +51,6 @@ describe('PollingProcessor', () => { await asyncifyNode(cb => processor.start(cb)); // didn't throw -> success }); - it('calls callback with error on failure', async () => { - const err = new Error('sorry'); - const requestor = { - requestAllData: cb => cb(err) - }; - processor = PollingProcessor(config, requestor); - - await expect(asyncifyNode(cb => processor.start(cb))).rejects.toThrow(/sorry.*will retry/); - }); - it('initializes feature store', async () => { const requestor = { requestAllData: cb => cb(null, jsonData) @@ -94,22 +78,26 @@ describe('PollingProcessor', () => { expect(requestor.requestAllData.mock.calls.length).toBeGreaterThanOrEqual(4); }); - function testRecoverableHttpError(status) { - it('continues polling after error ' + status, async () => { - const err = new Error('sorry'); - err.status = status; - const requestor = { - requestAllData: jest.fn(cb => cb(err)) - }; - config.pollInterval = 0.1; - processor = PollingProcessor(config, requestor); + async function testRecoverableError(err) { + const requestor = { + requestAllData: jest.fn(cb => cb(err)) + }; + config.pollInterval = 0.1; + processor = PollingProcessor(config, requestor); - processor.start(() => {}); - await sleepAsync(300); + let errReceived; + processor.start(e => { errReceived = e; }); + await sleepAsync(300); - expect(requestor.requestAllData.mock.calls.length).toBeGreaterThanOrEqual(2); - expect(config.logger.error).not.toHaveBeenCalled(); - }); + expect(requestor.requestAllData.mock.calls.length).toBeGreaterThanOrEqual(2); + 
expect(config.logger.error).not.toHaveBeenCalled(); + expect(errReceived).toBeUndefined(); + } + + function testRecoverableHttpError(status) { + const err = new Error('sorry'); + err.status = status; + it('continues polling after error ' + status, async () => await testRecoverableError(err)); } testRecoverableHttpError(400); @@ -118,6 +106,8 @@ describe('PollingProcessor', () => { testRecoverableHttpError(500); testRecoverableHttpError(503); + it('continues polling after I/O error', async () => await testRecoverableError(new Error('sorry'))); + function testUnrecoverableHttpError(status) { it('stops polling after error ' + status, async () => { const err = new Error('sorry'); @@ -128,11 +118,13 @@ describe('PollingProcessor', () => { config.pollInterval = 0.1; processor = PollingProcessor(config, requestor); - processor.start(() => {}); + let errReceived; + processor.start(e => { errReceived = e; }); await sleepAsync(300); expect(requestor.requestAllData.mock.calls.length).toEqual(1); expect(config.logger.error).toHaveBeenCalledTimes(1); + expect(errReceived).not.toBeUndefined(); }); } diff --git a/test/redis_feature_store-test.js b/test/redis_feature_store-test.js index cfc92be..37b18f0 100644 --- a/test/redis_feature_store-test.js +++ b/test/redis_feature_store-test.js @@ -1,15 +1,14 @@ -var RedisFeatureStore = require('../redis_feature_store'); -var testBase = require('./feature_store_test_base'); -var dataKind = require('../versioned_data_kind'); -var redis = require('redis'); +const RedisFeatureStore = require('../redis_feature_store'); +const testBase = require('./feature_store_test_base'); +const redis = require('redis'); const shouldSkip = (process.env.LD_SKIP_DATABASE_TESTS === '1'); -(shouldSkip ? describe.skip : describe)('RedisFeatureStore', function() { - var redisOpts = { url: 'redis://localhost:6379' }; +(shouldSkip ? 
describe.skip : describe)('RedisFeatureStore', () => { + const redisOpts = { url: 'redis://localhost:6379' }; - var extraRedisClient = redis.createClient(redisOpts); + const extraRedisClient = redis.createClient(redisOpts); function makeCachedStore() { return new RedisFeatureStore(redisOpts, 30); @@ -31,13 +30,13 @@ const shouldSkip = (process.env.LD_SKIP_DATABASE_TESTS === '1'); testBase.baseFeatureStoreTests(makeUncachedStore, clearExistingData, false, makeStoreWithPrefix); testBase.concurrentModificationTests(makeUncachedStore, - function(hook) { - var store = makeCachedStore(); + hook => { + const store = makeCachedStore(); store.underlyingStore.testUpdateHook = hook; return store; }); - afterAll(function() { + afterAll(() => { extraRedisClient.quit(); }); }); diff --git a/test/streaming-test.js b/test/streaming-test.js index f07adfc..ae02022 100644 --- a/test/streaming-test.js +++ b/test/streaming-test.js @@ -2,29 +2,27 @@ const InMemoryFeatureStore = require('../feature_store'); const StreamProcessor = require('../streaming'); const dataKind = require('../versioned_data_kind'); const { asyncify, sleepAsync } = require('./async_utils'); +const stubs = require('./stubs'); -describe('StreamProcessor', function() { - var sdkKey = 'SDK_KEY'; +describe('StreamProcessor', () => { + const sdkKey = 'SDK_KEY'; function fakeEventSource() { var es = { handlers: {} }; es.constructor = function(url, options) { es.url = url; es.options = options; - this.addEventListener = function(type, handler) { + this.addEventListener = (type, handler) => { es.handlers[type] = handler; }; + this.close = () => { + es.closed = true; + }; + es.instance = this; }; return es; } - function fakeLogger() { - return { - debug: jest.fn(), - error: jest.fn() - }; - } - function expectJsonError(err, config) { expect(err).not.toBe(undefined); expect(err.message).toEqual('Malformed JSON data in event stream'); @@ -62,7 +60,7 @@ describe('StreamProcessor', function() { it('causes flags and segments to 
be stored', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, null, es.constructor); sp.start(); @@ -80,7 +78,7 @@ describe('StreamProcessor', function() { it('calls initialization callback', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, null, es.constructor); @@ -92,7 +90,7 @@ describe('StreamProcessor', function() { it('passes error to callback if data is invalid', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, null, es.constructor); @@ -106,7 +104,7 @@ describe('StreamProcessor', function() { describe('patch message', function() { it('updates flag', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, null, es.constructor); @@ -124,7 +122,7 @@ describe('StreamProcessor', function() { it('updates segment', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, null, es.constructor); @@ -142,7 +140,7 @@ describe('StreamProcessor', function() { it('passes error to callback if data is 
invalid', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, null, es.constructor); @@ -156,7 +154,7 @@ describe('StreamProcessor', function() { describe('delete message', function() { it('deletes flag', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, null, es.constructor); @@ -176,7 +174,7 @@ describe('StreamProcessor', function() { it('deletes segment', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, null, es.constructor); @@ -196,7 +194,7 @@ describe('StreamProcessor', function() { it('passes error to callback if data is invalid', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, null, es.constructor); @@ -224,7 +222,7 @@ describe('StreamProcessor', function() { it('requests and stores flags and segments', async () => { var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, fakeRequestor, es.constructor); @@ -254,7 +252,7 @@ describe('StreamProcessor', function() { }; var featureStore = 
InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, fakeRequestor, es.constructor); @@ -278,7 +276,7 @@ describe('StreamProcessor', function() { }; var featureStore = InMemoryFeatureStore(); - var config = { featureStore: featureStore, logger: fakeLogger() }; + var config = { featureStore: featureStore, logger: stubs.stubLogger() }; var es = fakeEventSource(); var sp = StreamProcessor(sdkKey, config, fakeRequestor, es.constructor); @@ -291,4 +289,59 @@ describe('StreamProcessor', function() { expect(s.version).toEqual(1); }); }); + + async function testRecoverableError(err) { + const featureStore = InMemoryFeatureStore(); + const config = { featureStore: featureStore, logger: stubs.stubLogger() }; + const es = fakeEventSource(); + const sp = StreamProcessor(sdkKey, config, null, es.constructor); + + let errReceived; + sp.start(e => { errReceived = e; }); + + es.instance.onerror(err); + await sleepAsync(300); + + expect(config.logger.error).not.toHaveBeenCalled(); + expect(errReceived).toBeUndefined(); + expect(es.closed).not.toEqual(true); + } + + function testRecoverableHttpError(status) { + const err = new Error('sorry'); + err.status = status; + it('continues retrying after error ' + status, async () => await testRecoverableError(err)); + } + + testRecoverableHttpError(400); + testRecoverableHttpError(408); + testRecoverableHttpError(429); + testRecoverableHttpError(500); + testRecoverableHttpError(503); + + it('continues retrying after I/O error', async () => await testRecoverableError(new Error('sorry'))); + + function testUnrecoverableHttpError(status) { + it('stops retrying after error ' + status, async () => { + const err = new Error('sorry'); + err.status = status; + const featureStore = InMemoryFeatureStore(); + const config = { featureStore: featureStore, logger: 
stubs.stubLogger() }; + const es = fakeEventSource(); + const sp = StreamProcessor(sdkKey, config, null, es.constructor); + + let errReceived; + sp.start(e => { errReceived = e; }); + + es.instance.onerror(err); + await sleepAsync(300); + + expect(config.logger.error).toHaveBeenCalledTimes(1); + expect(errReceived).not.toBeUndefined(); + expect(es.closed).toEqual(true); + }); + } + + testUnrecoverableHttpError(401); + testUnrecoverableHttpError(403); }); diff --git a/test/update_queue-test.js b/test/update_queue-test.js index b182855..f80b778 100644 --- a/test/update_queue-test.js +++ b/test/update_queue-test.js @@ -56,6 +56,6 @@ describe('UpdateQueue', function() { } }); }); - }; + } }); }); diff --git a/test/user_filter-test.js b/test/user_filter-test.js index 4b5d41b..121f3d9 100644 --- a/test/user_filter-test.js +++ b/test/user_filter-test.js @@ -40,7 +40,7 @@ describe('user_filter', function() { var userWithSomeAttrsHidden = { 'key': 'abc', 'custom': { - 'dizzle': 'ghi' + 'dizzle': 'ghi' }, 'privateAttrs': [ 'bizzle', 'firstName' ] }; diff --git a/update_queue.js b/update_queue.js index 7c57fa4..65ad3e4 100644 --- a/update_queue.js +++ b/update_queue.js @@ -1,8 +1,8 @@ function UpdateQueue() { - var updateQueue = []; - this.enqueue = function(updateFn, fnArgs, cb) { - updateQueue.push(arguments); + const updateQueue = []; + this.enqueue = (updateFn, fnArgs, cb) => { + updateQueue.push([ updateFn, fnArgs, cb ]); if (updateQueue.length === 1) { // if nothing else is in progress, we can start this one right away executePendingUpdates(); @@ -14,7 +14,7 @@ function UpdateQueue() { const fn = entry[0]; const args = entry[1]; const cb = entry[2]; - const newCb = function() { + const newCb = () => { updateQueue.shift(); if (updateQueue.length > 0) { setImmediate(executePendingUpdates); diff --git a/user_filter.js b/user_filter.js index ccd6b86..e09c0e6 100644 --- a/user_filter.js +++ b/user_filter.js @@ -1,4 +1,3 @@ -var messages = require('./messages'); /** * The 
UserFilter object transforms user objects into objects suitable to be sent as JSON to @@ -7,24 +6,22 @@ var messages = require('./messages'); * @param {Object} the LaunchDarkly client configuration object **/ function UserFilter(config) { - var filter = {}; + const filter = {}; const allAttributesPrivate = config.allAttributesPrivate; const privateAttributeNames = config.privateAttributeNames || []; - var ignoreAttrs = { key: true, custom: true, anonymous: true }; - var allowedTopLevelAttrs = { key: true, secondary: true, ip: true, country: true, email: true, - firstName: true, lastName: true, avatar: true, name: true, anonymous: true, custom: true }; + const ignoreAttrs = { key: true, custom: true, anonymous: true }; + const allowedTopLevelAttrs = { key: true, secondary: true, ip: true, country: true, email: true, + firstName: true, lastName: true, avatar: true, name: true, anonymous: true, custom: true }; - filter.filterUser = function(user) { - var allPrivateAttrs = {}; - var userPrivateAttrs = user.privateAttributeNames || []; + filter.filterUser = user => { + const userPrivateAttrs = user.privateAttributeNames || []; - var isPrivateAttr = function(name) { - return !ignoreAttrs[name] && ( + const isPrivateAttr = name => + !ignoreAttrs[name] && ( allAttributesPrivate || userPrivateAttrs.indexOf(name) !== -1 || privateAttributeNames.indexOf(name) !== -1); - } - var filterAttrs = function(props, isAttributeAllowed) { - return Object.keys(props).reduce(function(acc, name) { + const filterAttrs = (props, isAttributeAllowed) => + Object.keys(props).reduce((acc, name) => { if (isAttributeAllowed(name)) { if (isPrivateAttr(name)) { // add to hidden list @@ -35,22 +32,22 @@ function UserFilter(config) { } return acc; }, [{}, {}]); - } - var result = filterAttrs(user, function(key) { return allowedTopLevelAttrs[key]; }); - var filteredProps = result[0]; - var removedAttrs = result[1]; + + const result = filterAttrs(user, key => allowedTopLevelAttrs[key]); + const 
filteredProps = result[0]; + const removedAttrs = result[1]; if (user.custom) { - var customResult = filterAttrs(user.custom, function(key) { return true; }); + const customResult = filterAttrs(user.custom, () => true); filteredProps.custom = customResult[0]; Object.assign(removedAttrs, customResult[1]); } - var removedAttrNames = Object.keys(removedAttrs); + const removedAttrNames = Object.keys(removedAttrs); if (removedAttrNames.length) { removedAttrNames.sort(); filteredProps.privateAttrs = removedAttrNames; } return filteredProps; - } + }; return filter; } diff --git a/utils/__tests__/wrapPromiseCallback-test.js b/utils/__tests__/wrapPromiseCallback-test.js index 2cfae6a..8bf4344 100644 --- a/utils/__tests__/wrapPromiseCallback-test.js +++ b/utils/__tests__/wrapPromiseCallback-test.js @@ -1,31 +1,29 @@ -var wrapPromiseCallback = require('../wrapPromiseCallback'); +const wrapPromiseCallback = require('../wrapPromiseCallback'); -const wait = ms => new Promise(function(resolve) { setTimeout(resolve, ms); }); - -describe('wrapPromiseCallback',function() { - it('should resolve to the value', function() { +describe('wrapPromiseCallback', () => { + it('should resolve to the value', () => { const promise = wrapPromiseCallback(Promise.resolve('woohoo')); return expect(promise).resolves.toBe('woohoo'); }); - it('should reject with the error', function() { + it('should reject with the error', () => { const error = new Error('something went wrong'); const promise = wrapPromiseCallback(Promise.reject(error)); return expect(promise).rejects.toBe(error); }); - it('should call the callback with a value if the promise resolves', function(done) { - const promise = wrapPromiseCallback(Promise.resolve('woohoo'), function(error, value) { + it('should call the callback with a value if the promise resolves', done => { + const promise = wrapPromiseCallback(Promise.resolve('woohoo'), (error, value) => { expect(promise).toBeUndefined(); expect(error).toBeNull(); 
expect(value).toBe('woohoo'); - done() + done(); }); }); - it('should call the callback with an error if the promise rejects', function(done) { + it('should call the callback with an error if the promise rejects', done => { const actualError = new Error('something went wrong'); - const promise = wrapPromiseCallback(Promise.reject(actualError), function(error, value) { + const promise = wrapPromiseCallback(Promise.reject(actualError), (error, value) => { expect(promise).toBeUndefined(); expect(error).toBe(actualError); expect(value).toBeNull(); diff --git a/utils/asyncUtils.js b/utils/asyncUtils.js new file mode 100644 index 0000000..24fc719 --- /dev/null +++ b/utils/asyncUtils.js @@ -0,0 +1,32 @@ +const async = require('async'); + +// The safeAsync functions allow us to use async collection functions as efficiently as possible +// while avoiding stack overflows. When the async utilities call our iteratee, they provide a +// callback for delivering a result. Calling that callback directly is efficient (we are not +// really worried about blocking a thread by doing too many computations without yielding; our +// flag evaluations are pretty fast, and if we end up having to do any I/O, that will cause us +// to yield anyway)... but, if there are many items in the collection, it will result in too +// many nested calls. So, we'll pick an arbitrary threshold of how many items can be in the +// collection before we switch over to deferring the callbacks with setImmediate(). 
+ +const maxNestedCalls = 50; + +function safeIteratee(collection, iteratee) { + if (!collection || collection.length <= maxNestedCalls) { + return iteratee; + } + return (value, callback) => iteratee(value, (...args) => setImmediate(callback, ...args)); +} + +function safeAsyncEach(collection, iteratee, resultCallback) { + return async.each(collection, safeIteratee(collection, iteratee), resultCallback); +} + +function safeAsyncEachSeries(collection, iteratee, resultCallback) { + return async.eachSeries(collection, safeIteratee(collection, iteratee), resultCallback); +} + +module.exports = { + safeAsyncEach: safeAsyncEach, + safeAsyncEachSeries: safeAsyncEachSeries +}; diff --git a/utils/stringifyAttrs.js b/utils/stringifyAttrs.js index ee77864..2aaa49a 100644 --- a/utils/stringifyAttrs.js +++ b/utils/stringifyAttrs.js @@ -3,14 +3,14 @@ module.exports = function stringifyAttrs(object, attrs) { if (!object) { return object; } - var newObject; - for (var i in attrs) { - var attr = attrs[i]; - var value = object[attr]; + let newObject; + for (let i in attrs) { + const attr = attrs[i]; + const value = object[attr]; if (value !== undefined && typeof value !== 'string') { newObject = newObject || Object.assign({}, object); newObject[attr] = String(value); } } return newObject || object; -} +}; diff --git a/utils/wrapPromiseCallback.js b/utils/wrapPromiseCallback.js index 909c17c..e77d84c 100644 --- a/utils/wrapPromiseCallback.js +++ b/utils/wrapPromiseCallback.js @@ -13,15 +13,15 @@ */ module.exports = function wrapPromiseCallback(promise, callback) { const ret = promise.then( - function(value) { + value => { if (callback) { - setTimeout(function() { callback(null, value); }, 0); + setImmediate(() => { callback(null, value); }); } return value; }, - function(error) { + error => { if (callback) { - setTimeout(function() { callback(error, null); }, 0); + setImmediate(() => { callback(error, null); }); } else { return Promise.reject(error); } @@ -29,4 +29,4 @@ 
module.exports = function wrapPromiseCallback(promise, callback) { ); return !callback ? ret : undefined; -} \ No newline at end of file +}; diff --git a/versioned_data_kind.js b/versioned_data_kind.js index d71b2c3..da6190f 100644 --- a/versioned_data_kind.js +++ b/versioned_data_kind.js @@ -9,20 +9,20 @@ module.exports. */ -var features = { +const features = { namespace: 'features', streamApiPath: '/flags/', requestPath: '/sdk/latest-flags/', priority: 1, - getDependencyKeys: function(flag) { + getDependencyKeys: flag => { if (!flag.prerequisites || !flag.prerequisites.length) { return []; } - return flag.prerequisites.map(function(p) { return p.key; }); + return flag.prerequisites.map(p => p.key); } }; -var segments = { +const segments = { namespace: 'segments', streamApiPath: '/segments/', requestPath: '/sdk/latest-segments/',