diff --git a/.eslintrc.yaml b/.eslintrc.yaml index bb152ab..4934d7f 100644 --- a/.eslintrc.yaml +++ b/.eslintrc.yaml @@ -75,6 +75,12 @@ rules: - error - props: true + # https://eslint.org/docs/rules/no-restricted-syntax + no-restricted-syntax: + - error + - selector: ForInStatement + message: avoid for...in which can pick up properties from prototypes; use for...of, or plain for if you need array indices + # https://eslint.org/docs/rules/no-return-assign no-return-assign: error diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index 122dd96..1335123 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -1,8 +1,10 @@ +version: 2 + repo: public: node-server-sdk private: node-server-sdk-private -releasableBranches: +branches: - name: master description: 6.x - name: 5.x @@ -11,11 +13,14 @@ publications: - url: https://www.npmjs.com/package/launchdarkly-node-server-sdk description: npm -template: - name: npm +jobs: + - docker: + image: node:12-buster + template: + name: npm documentation: - githubPages: true + gitHubPages: true title: LaunchDarkly Server-Side Node SDK sdk: diff --git a/big_segments.js b/big_segments.js index bc2eea7..8b64912 100644 --- a/big_segments.js +++ b/big_segments.js @@ -37,7 +37,7 @@ function BigSegmentStoreManager(store, config, logger) { return lastStatus; }; - // Called by the evaluator when it needs to get the big segment membership state for a user. + // Called by the evaluator when it needs to get the Big Segment membership state for a user. // // If there is a cached membership state for the user, it returns the cached state. 
Otherwise, // it converts the user key into the hash string used by the BigSegmentStore, queries the store, @@ -59,7 +59,7 @@ function BigSegmentStoreManager(store, config, logger) { } cache.set(userKey, membership); } catch (e) { - logger.error('Big segment store membership query returned error: ' + e); + logger.error('Big Segment store membership query returned error: ' + e); return [null, 'STORE_ERROR']; } cache.set(userKey, membership); @@ -78,18 +78,18 @@ function BigSegmentStoreManager(store, config, logger) { lastStatus = { available: false, stale: false }; return; } - logger.debug('Querying big segment store status'); + logger.debug('Querying Big Segment store status'); let newStatus; try { const metadata = await store.getMetadata(); newStatus = { available: true, stale: !metadata || !metadata.lastUpToDate || isStale(metadata.lastUpToDate) }; } catch (e) { - logger.error('Big segment store status query returned error: ' + e); + logger.error('Big Segment store status query returned error: ' + e); newStatus = { available: false, stale: false }; } if (!lastStatus || lastStatus.available !== newStatus.available || lastStatus.stale !== newStatus.stale) { logger.debug( - 'Big segment store status changed from %s to %s', + 'Big Segment store status changed from %s to %s', JSON.stringify(lastStatus), JSON.stringify(newStatus) ); diff --git a/caching_store_wrapper.js b/caching_store_wrapper.js index 1d5799a..12f9084 100644 --- a/caching_store_wrapper.js +++ b/caching_store_wrapper.js @@ -195,8 +195,8 @@ function CachingStoreWrapper(underlyingStore, ttl, description) { if (!cache) { return; } - for (const kindNamespace in dataKind) { - cache.del(allCacheKey(dataKind[kindNamespace])); + for (const eachKind of Object.values(dataKind)) { + cache.del(allCacheKey(eachKind)); } } diff --git a/evaluator.js b/evaluator.js index 98b7ef3..8801463 100644 --- a/evaluator.js +++ b/evaluator.js @@ -250,7 +250,7 @@ function segmentMatchUser(segment, user, queries, stateOut, cb) { } 
if (!segment.generation) { - // Big segment queries can only be done if the generation is known. If it's unset, + // Big Segment queries can only be done if the generation is known. If it's unset, // that probably means the data store was populated by an older SDK that doesn't know // about the generation property and therefore dropped it from the JSON data. We'll treat // that as a "not configured" condition. @@ -261,7 +261,7 @@ function segmentMatchUser(segment, user, queries, stateOut, cb) { if (stateOut.bigSegmentsStatus) { // We've already done the query at some point during the flag evaluation and stored // the result (if any) in stateOut.bigSegmentsMembership, so we don't need to do it - // again. Even if multiple big segments are being referenced, the membership includes + // again. Even if multiple Big Segments are being referenced, the membership includes // *all* of the user's segment memberships. return cb(bigSegmentMatchUser(stateOut.bigSegmentsMembership, segment, user)); } @@ -461,7 +461,7 @@ function sha1Hex(input) { } function makeBigSegmentRef(segment) { - // The format of big segment references is independent of what store implementation is being + // The format of Big Segment references is independent of what store implementation is being // used; the store implementation receives only this string and does not know the details of // the data model. The Relay Proxy will use the same format when writing to the store. 
return segment.key + '.g' + segment.generation; diff --git a/event_summarizer.js b/event_summarizer.js index 1edc453..c395915 100644 --- a/event_summarizer.js +++ b/event_summarizer.js @@ -37,8 +37,7 @@ function EventSummarizer() { es.getSummary = () => { const flagsOut = {}; - for (const i in counters) { - const c = counters[i]; + for (const c of Object.values(counters)) { let flag = flagsOut[c.key]; if (!flag) { flag = { diff --git a/feature_store.js b/feature_store.js index a83d244..d4f6e9c 100644 --- a/feature_store.js +++ b/feature_store.js @@ -44,12 +44,9 @@ function InMemoryFeatureStore() { const results = {}; const items = allData[kind.namespace] || {}; - for (const key in items) { - if (Object.hasOwnProperty.call(items, key)) { - const item = items[key]; - if (item && !item.deleted) { - results[key] = item; - } + for (const [key, item] of Object.entries(items)) { + if (item && !item.deleted) { + results[key] = item; } } diff --git a/feature_store_event_wrapper.js b/feature_store_event_wrapper.js index 432241f..f17fa7b 100644 --- a/feature_store_event_wrapper.js +++ b/feature_store_event_wrapper.js @@ -29,11 +29,9 @@ function NamespacedDataSet() { } function enumerate(callback) { - for (const ns in itemsByNamespace) { - const items = itemsByNamespace[ns]; + for (const [ns, items] of Object.entries(itemsByNamespace)) { const keys = Object.keys(items).sort(); // sort to make tests determinate - for (const i in keys) { - const key = keys[i]; + for (const key of keys) { callback(ns, key, items[key]); } } @@ -138,16 +136,14 @@ function FeatureStoreEventWrapper(featureStore, emitter) { function computeDependencies(kind, item) { const ret = NamespacedDataSet(); if (kind === dataKind.features) { - for (const i in item.prerequisites || []) { - ret.set(dataKind.features.namespace, item.prerequisites[i].key, true); + for (const prereq of item.prerequisites || []) { + ret.set(dataKind.features.namespace, prereq.key, true); } - for (const i in item.rules || []) { - 
const rule = item.rules[i]; - for (const j in rule.clauses || []) { - const clause = rule.clauses[j]; + for (const rule of item.rules || []) { + for (const clause of rule.clauses || []) { if (clause.op === 'segmentMatch') { - for (const k in clause.values) { - ret.set(dataKind.segments.namespace, clause.values[k], true); + for (const value of clause.values) { + ret.set(dataKind.segments.namespace, value, true); } } } @@ -168,10 +164,9 @@ function FeatureStoreEventWrapper(featureStore, emitter) { featureStore.init(newData, () => { dependencyTracker.reset(); - for (const namespace in newData) { - const items = newData[namespace]; + for (const [namespace, items] of Object.entries(newData)) { const kind = dataKind[namespace]; - for (const key in items) { + for (const key of Object.keys(items || {})) { const item = items[key]; dependencyTracker.updateDependenciesFrom(namespace, key, computeDependencies(kind, item)); } @@ -179,11 +174,11 @@ function FeatureStoreEventWrapper(featureStore, emitter) { if (checkForChanges) { const updatedItems = NamespacedDataSet(); - for (const namespace in newData) { + for (const namespace of Object.keys(newData)) { const oldDataForKind = oldData[namespace]; const newDataForKind = newData[namespace]; const mergedData = Object.assign({}, oldDataForKind, newDataForKind); - for (const key in mergedData) { + for (const key of Object.keys(mergedData)) { addIfModified( namespace, key, diff --git a/index.d.ts b/index.d.ts index ac79477..4815b67 100644 --- a/index.d.ts +++ b/index.d.ts @@ -152,16 +152,16 @@ declare module 'launchdarkly-node-server-sdk' { inExperiment?: boolean; /** - * Describes the validity of big segment information, if and only if the flag evaluation - * required querying at least one big segment. + * Describes the validity of Big Segment information, if and only if the flag evaluation + * required querying at least one Big Segment. 
* - * - `'HEALTHY'`: The big segment query involved in the flag evaluation was successful, and + * - `'HEALTHY'`: The Big Segment query involved in the flag evaluation was successful, and * the segment state is considered up to date. - * - `'STALE'`: The big segment query involved in the flag evaluation was successful, but + * - `'STALE'`: The Big Segment query involved in the flag evaluation was successful, but * the segment state may not be up to date - * - `'NOT_CONFIGURED'`: Big segments could not be queried for the flag evaluation because - * the SDK configuration did not include a big segment store. - * - `'STORE_ERROR'`: The big segment query involved in the flag evaluation failed, for + * - `'NOT_CONFIGURED'`: Big Segments could not be queried for the flag evaluation because + * the SDK configuration did not include a Big Segment store. + * - `'STORE_ERROR'`: The Big Segment query involved in the flag evaluation failed, for * instance due to a database error. */ bigSegmentsStatus?: "HEALTHY" | "STALE" | "NOT_CONFIGURED" | "STORE_ERROR"; @@ -259,14 +259,14 @@ declare module 'launchdarkly-node-server-sdk' { featureStore?: LDFeatureStore | ((options: LDOptions) => LDFeatureStore); /** - * Additional parameters for configuring the SDK's big segments behavior. + * Additional parameters for configuring the SDK's Big Segments behavior. * - * "Big segments" are a specific type of user segments. For more information, read the - * LaunchDarkly documentation about user segments: https://docs.launchdarkly.com/home/users/big-segments + * Big Segments are a specific type of user segments. For more information, read the + * LaunchDarkly documentation: https://docs.launchdarkly.com/home/users/big-segments * - * By default, there is no configuration and big segments cannot be evaluated. 
In this - * case, any flag evaluation that references a big segment will behave as if no users - * are included in any big segments, and the {@link LDEvaluationReason} associated with any + * By default, there is no configuration and Big Segments cannot be evaluated. In this + * case, any flag evaluation that references a Big Segment will behave as if no users + * are included in any Big Segments, and the {@link LDEvaluationReason} associated with any * such flag evaluation will have a `bigSegmentsStatus` of `"NOT_CONFIGURED"`. */ bigSegments?: LDBigSegmentsOptions; @@ -432,33 +432,33 @@ declare module 'launchdarkly-node-server-sdk' { } /** - * Additional parameters for configuring the SDK's big segments behavior. + * Additional parameters for configuring the SDK's Big Segments behavior. * - * "Big segments" are a specific type of user segments. For more information, read the LaunchDarkly - * documentation about user segments: https://docs.launchdarkly.com/home/users/big-segments + * Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + * documentation: https://docs.launchdarkly.com/home/users/big-segments * * @see {@link LDOptions.bigSegments} */ export interface LDBigSegmentsOptions { /** - * Specifies the storage component that provides big segments data. + * Specifies the storage component that provides Big Segments data. * * This property is mandatory. It must be obtained from one of the SDK's database integrations, * such as https://github.com/launchdarkly/node-server-sdk-redis. You will normally specify a * database implementation that matches how the LaunchDarkly Relay Proxy is configured, since the - * Relay Proxy manages the big segment data. + * Relay Proxy manages the Big Segment data. */ store: (options: LDOptions) => interfaces.BigSegmentStore; /** - * The maximum number of users whose big segment state will be cached by the SDK at any given time. 
+ * The maximum number of users whose Big Segment state will be cached by the SDK at any given time. * * To reduce database traffic, the SDK maintains a least-recently-used cache by user key. When a feature - * flag that references a big segment is evaluated for some user who is not currently in the cache, the - * SDK queries the database for all big segment memberships of that user, and stores them together in a + * flag that references a Big Segment is evaluated for some user who is not currently in the cache, the + * SDK queries the database for all Big Segment memberships of that user, and stores them together in a * single cache entry. If the cache is full, the oldest entry is dropped. * - * A higher value for `userCacheSize` means that database queries for big segments will be done + * A higher value for `userCacheSize` means that database queries for Big Segments will be done * less often for recently-referenced users, if the application has many users, at the cost of * increased memory used by the cache. * @@ -469,11 +469,11 @@ declare module 'launchdarkly-node-server-sdk' { userCacheSize?: number; /** - * The maximum length of time that the big segment state for a user will be cached by the SDK, + * The maximum length of time that the Big Segment state for a user will be cached by the SDK, * in seconds. * * See {@link userCacheSize} for more about this cache. A higher value for `userCacheTime` means - * that database queries for the big segment state of any given user will be done less often, but + * that database queries for the Big Segment state of any given user will be done less often, but * that changes to segment membership may not be detected as soon. * * If not specified, the default value is 5. Negative values are changed to the default. 
@@ -481,7 +481,7 @@ declare module 'launchdarkly-node-server-sdk' { userCacheTime?: number; /** - * The interval at which the SDK will poll the big segment store to make sure it is available + * The interval at which the SDK will poll the Big Segment store to make sure it is available * and to determine how long ago it was updated, in seconds. * * If not specified, the default value is 5. Zero or negative values are changed to the default. @@ -489,17 +489,17 @@ declare module 'launchdarkly-node-server-sdk' { statusPollInterval?: number; /** - * The maximum length of time between updates of the big segments data before the data is + * The maximum length of time between updates of the Big Segments data before the data is * considered out of date, in seconds. * - * Normally, the LaunchDarkly Relay Proxy updates a timestamp in the big segments store at intervals to + * Normally, the LaunchDarkly Relay Proxy updates a timestamp in the Big Segment store at intervals to * confirm that it is still in sync with the LaunchDarkly data, even if there have been no changes to the data. * If the timestamp falls behind the current time by the amount specified in `staleAfter`, the SDK * assumes that something is not working correctly in this process and that the data may not be accurate. * * While in a stale state, the SDK will still continue using the last known data, but the status from * {@link interfaces.BigSegmentStoreStatusProvider.getStatus} will have `stale: true`, and any - * {@link LDEvaluationReason} generated from a feature flag that references a big segment will have a + * {@link LDEvaluationReason} generated from a feature flag that references a Big Segment will have a * `bigSegmentsStatus` of `"STALE"`. * * If not specified, the default value is 120 (two minutes). 
Zero or negative values are changed to @@ -1093,9 +1093,9 @@ declare module 'launchdarkly-node-server-sdk' { flush(callback?: (err: Error, res: boolean) => void): Promise; /** - * A mechanism for tracking the status of a big segment store. + * A mechanism for tracking the status of a Big Segment store. * - * This object has methods for checking whether the big segment store is (as far as the SDK + * This object has methods for checking whether the Big Segment store is (as far as the SDK * knows) currently operational and tracking changes in this status. See * {@link interfaces.BigSegmentStoreStatusProvider} for more about this functionality. */ @@ -1299,10 +1299,10 @@ declare module 'launchdarkly-node-server-sdk/interfaces' { import { EventEmitter } from 'events'; /** - * A read-only data store that allows querying of user membership in big segments. + * A read-only data store that allows querying of user membership in Big Segments. * - * "Big segments" are a specific type of user segments. For more information, read the LaunchDarkly - * documentation about user segments: https://docs.launchdarkly.com/home/users/big-segments + * Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + * documentation: https://docs.launchdarkly.com/home/users/big-segments */ export interface BigSegmentStore { /** @@ -1323,12 +1323,12 @@ declare module 'launchdarkly-node-server-sdk/interfaces' { * Queries the store for a snapshot of the current segment state for a specific user. * * The userHash is a base64-encoded string produced by hashing the user key as defined by - * the big segments specification; the store implementation does not need to know the details + * the Big Segments specification; the store implementation does not need to know the details * of how this is done, because it deals only with already-hashed keys, but the string can be * assumed to only contain characters that are valid in base64. 
* * The resolved value of the Promise should be either a [[BigSegmentStoreMembership]], or - * undefined if the user is not referenced in any big segments (this is equivalent to a + * undefined if the user is not referenced in any Big Segments (this is equivalent to a * [[BigSegmentStoreMembership]] that has no properties). * * @param userHash identifies the user @@ -1354,7 +1354,7 @@ declare module 'launchdarkly-node-server-sdk/interfaces' { } /** - * The return type of [[BigSegmentStore.getUserMembership]], describing which big segments a + * The return type of [[BigSegmentStore.getUserMembership]], describing which Big Segments a * specific user is included in or excluded from. * * This object may be cached by the SDK, so it should not be modified after it is created. It @@ -1363,7 +1363,7 @@ declare module 'launchdarkly-node-server-sdk/interfaces' { export interface BigSegmentStoreMembership { /** * Each property key in this object is a "segment reference", which is how segments are - * identified in big segment data. This string is not identical to the segment key-- the SDK + * identified in Big Segment data. This string is not identical to the segment key-- the SDK * will add other information. The store implementation should not be concerned with the * format of the string. * @@ -1377,12 +1377,12 @@ declare module 'launchdarkly-node-server-sdk/interfaces' { } /** - * An interface for querying the status of a big segment store. + * An interface for querying the status of a Big Segment store. * - * The big segment store is the component that receives information about big segments, normally - * from a database populated by the LaunchDarkly Relay Proxy. "Big segments" are a specific type - * of user segments. 
For more information, read the LaunchDarkly documentation about user - * segments: https://docs.launchdarkly.com/home/users/big-segments + * The Big Segment store is the component that receives information about Big Segments, normally + * from a database populated by the LaunchDarkly Relay Proxy. Big Segments are a specific type + * of user segments. For more information, read the LaunchDarkly documentation: + * https://docs.launchdarkly.com/home/users/big-segments * * An implementation of this interface is returned by {@link LDClient.bigSegmentStoreStatusProvider}. * Application code never needs to implement this interface. @@ -1398,7 +1398,7 @@ declare module 'launchdarkly-node-server-sdk/interfaces' { * Gets the current status of the store, if known. * * @returns a {@link BigSegmentStoreStatus}, or `undefined` if the SDK has not yet queried the - * big segment store status + * Big Segment store status */ getStatus(): BigSegmentStoreStatus | undefined; @@ -1411,31 +1411,31 @@ declare module 'launchdarkly-node-server-sdk/interfaces' { } /** - * Information about the status of a big segment store, provided by {@link BigSegmentStoreStatusProvider}. + * Information about the status of a Big Segment store, provided by {@link BigSegmentStoreStatusProvider}. * - * "Big segments" are a specific type of user segments. For more information, read the LaunchDarkly - * documentation about user segments: https://docs.launchdarkly.com/home/users/big-segments + * Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + * documentation: https://docs.launchdarkly.com/home/users/big-segments */ export interface BigSegmentStoreStatus { /** - * True if the big segment store is able to respond to queries, so that the SDK can + * True if the Big Segment store is able to respond to queries, so that the SDK can * evaluate whether a user is in a segment or not. 
* * If this property is false, the store is not able to make queries (for instance, it may not have - * a valid database connection). In this case, the SDK will treat any reference to a big segment + * a valid database connection). In this case, the SDK will treat any reference to a Big Segment * as if no users are included in that segment. Also, the {@link LDEvaluationReason} associated - * with any flag evaluation that references a big segment when the store is not available will + * with any flag evaluation that references a Big Segment when the store is not available will * have a `bigSegmentsStatus` of `"STORE_ERROR"`. */ available: boolean; /** - * True if the big segment store is available, but has not been updated within the amount of time + * True if the Big Segment store is available, but has not been updated within the amount of time * specified by {@link LDBigSegmentsOptions.staleAfter}. * * This may indicate that the LaunchDarkly Relay Proxy, which populates the store, has stopped * running or has become unable to receive fresh data from LaunchDarkly. Any feature flag - * evaluations that reference a big segment will be using the last known data, which may be out + * evaluations that reference a Big Segment will be using the last known data, which may be out * of date. */ stale: boolean; @@ -1750,7 +1750,7 @@ declare module 'launchdarkly-node-server-sdk/requestor' { ): void; /** - * A standard test suite that should be run on every big segment store implementation. + * A standard test suite that should be run on every Big Segment store implementation. * * This test suite uses `jest` and should be run inside a `describe` block. * @@ -1766,12 +1766,12 @@ declare module 'launchdarkly-node-server-sdk/requestor' { * in separate files, if `jest` parallelization is enabled; they can interfere with each * other's database state if they are interleaved. 
* - * @param createStore A function that creates a big segment store instance with the + * @param createStore A function that creates a Big Segment store instance with the * specified key prefix. * @param clearExistingData An asynchronous function that removes any existing data from * the database for the specified key prefix only. * @param setMetadata An asynchronous function that updates the store metadata. - * @param setSegments An asynchronous function that sets a user's big segment state. + * @param setSegments An asynchronous function that sets a user's Big Segment state. */ export function runBigSegmentStoreTests( createStore: (prefix: string, logger: ld.LDLogger) => interfaces.BigSegmentStore, diff --git a/loggers.js b/loggers.js index 4687a9d..1d513cc 100644 --- a/loggers.js +++ b/loggers.js @@ -13,7 +13,7 @@ function basicLogger(options) { let minLevel = 1; // default is 'info' if (options && options.level) { - for (const i in logLevels) { + for (let i = 0; i < logLevels.length; i++) { if (logLevels[i] === options.level) { minLevel = i; } @@ -36,7 +36,7 @@ function basicLogger(options) { } const logger = {}; - for (const i in logLevels) { + for (let i = 0; i < logLevels.length; i++) { const levelName = logLevels[i]; if (levelName !== 'none') { if (i < minLevel) { diff --git a/package.json b/package.json index 9c82ec0..a0220c1 100644 --- a/package.json +++ b/package.json @@ -49,7 +49,7 @@ "launchdarkly-js-test-helpers": "^1.2.1", "prettier": "^2.3.1", "tmp": "^0.2.1", - "typescript": "^4.3.2", + "typescript": "~4.4.4", "yaml": "^1.10.2" }, "jest": { diff --git a/streaming.js b/streaming.js index ff37101..d7a870c 100644 --- a/streaming.js +++ b/streaming.js @@ -125,8 +125,7 @@ function StreamProcessor(sdkKey, config, requestor, diagnosticsManager, specifie reportJsonError('patch', e.data); return; } - for (const k in dataKind) { - const kind = dataKind[k]; + for (const kind of Object.values(dataKind)) { const key = getKeyFromPath(kind, patch.path); if (key 
!== null) { config.logger.debug('Updating ' + key + ' in ' + kind.namespace); @@ -150,8 +149,7 @@ function StreamProcessor(sdkKey, config, requestor, diagnosticsManager, specifie return; } const version = data.version; - for (const k in dataKind) { - const kind = dataKind[k]; + for (const kind of Object.values(dataKind)) { const key = getKeyFromPath(kind, data.path); if (key !== null) { config.logger.debug('Deleting ' + key + ' in ' + kind.namespace); diff --git a/test/LDClient-tls-test.js b/test/LDClient-tls-test.js index 220ca6b..30020c1 100644 --- a/test/LDClient-tls-test.js +++ b/test/LDClient-tls-test.js @@ -47,7 +47,7 @@ describe('LDClient TLS configuration', () => { await withCloseable(LDClient.init(sdkKey, config), async client => { await sleepAsync(300); // the client won't signal an unrecoverable error, but it should log a message expect(config.logger.warn.mock.calls.length).toEqual(2); - expect(config.logger.warn.mock.calls[1][0]).toMatch(/self signed/); + expect(config.logger.warn.mock.calls[1][0]).toMatch(/self.signed/); }); }); }); diff --git a/test/evaluator-big-segments-test.js b/test/evaluator-big-segments-test.js index 1922583..456b645 100644 --- a/test/evaluator-big-segments-test.js +++ b/test/evaluator-big-segments-test.js @@ -9,9 +9,9 @@ const { makeSegmentMatchClause, } = require('./evaluator_helpers'); -// Tests of flag evaluation involving big segments. +// Tests of flag evaluation involving Big Segments. 
-describe('Evaluator - big segments', () => { +describe('Evaluator - Big Segments', () => { it('segment is not matched if there is no way to query it', async () => { const segment = { key: 'test', diff --git a/tsconfig.json b/tsconfig.json index 3b8c161..3f0986b 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -3,6 +3,7 @@ "module": "commonjs", "strict": true, "lib": [ + "dom", "es6" ] }, diff --git a/utils/stringifyAttrs.js b/utils/stringifyAttrs.js index 7c0542e..31eb5ae 100644 --- a/utils/stringifyAttrs.js +++ b/utils/stringifyAttrs.js @@ -3,8 +3,7 @@ module.exports = function stringifyAttrs(object, attrs) { return object; } let newObject; - for (const i in attrs) { - const attr = attrs[i]; + for (const attr of attrs) { const value = object[attr]; if (value !== undefined && typeof value !== 'string') { newObject = newObject || Object.assign({}, object);