From 4506089231a4dcd86970cc723c7d24ea9d86876c Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 10 Jul 2024 16:03:56 -0300 Subject: [PATCH 001/146] Add largeSegments cache in client-side InMemory storage --- src/storages/inMemory/InMemoryStorageCS.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 84d2351b..30667369 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -18,10 +18,12 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag const splits = new SplitsCacheInMemory(__splitFiltersValidation); const segments = new MySegmentsCacheInMemory(); + const largeSegments = new MySegmentsCacheInMemory(); const storage = { splits, segments, + largeSegments, impressions: new ImpressionsCacheInMemory(impressionsQueueSize), impressionCounts: impressionsMode !== DEBUG ? new ImpressionCountsCacheInMemory() : undefined, events: new EventsCacheInMemory(eventsQueueSize), @@ -32,6 +34,7 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag destroy() { this.splits.clear(); this.segments.clear(); + this.largeSegments.clear(); this.impressions.clear(); this.impressionCounts && this.impressionCounts.clear(); this.events.clear(); @@ -43,6 +46,7 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag return { splits: this.splits, segments: new MySegmentsCacheInMemory(), + largeSegments: new MySegmentsCacheInMemory(), impressions: this.impressions, impressionCounts: this.impressionCounts, events: this.events, @@ -52,6 +56,7 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag destroy() { this.splits = new SplitsCacheInMemory(__splitFiltersValidation); this.segments.clear(); + this.largeSegments.clear(); } }; }, From ee606280ef23aa990322027eb03082b375d975bc Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 10 Jul 2024 16:15:06 -0300 Subject: [PATCH 002/146] Remove unused KeyBuilderCS::buildOldSegmentNameKey method --- src/storages/KeyBuilderCS.ts | 4 ---- .../inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/src/storages/KeyBuilderCS.ts b/src/storages/KeyBuilderCS.ts index 65b59397..d0752cf5 100644 --- a/src/storages/KeyBuilderCS.ts +++ b/src/storages/KeyBuilderCS.ts @@ -26,10 +26,6 @@ export class KeyBuilderCS extends KeyBuilder { return builtSegmentKeyName.substr(prefix.length); } - // @BREAKING: The key used to start with the matching key instead of the prefix, this was changed on version 10.17.3 - buildOldSegmentNameKey(segmentName: string) { - return `${this.matchingKey}.${this.prefix}.segment.${segmentName}`; - } // @BREAKING: The key used to start with the matching key instead of the prefix, this was changed on version 10.17.3 extractOldSegmentKey(builtSegmentKeyName: string) { const prefix = `${this.matchingKey}.${this.prefix}.segment.`; diff --git a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts index ae6dac82..f61985e0 100644 --- a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts @@ -37,7 +37,7 @@ test('SEGMENT CACHE / in LocalStorage migration for mysegments keys', () => { cache.clear(); // cleanup before starting. 
// Not adding a full suite for LS keys now, testing here - expect(oldKey1).toBe(keys.buildOldSegmentNameKey('segment1')); + expect(oldKey1).toBe(`test_nico.${keys.prefix}.segment.segment1`); expect('segment1').toBe(keys.extractOldSegmentKey(oldKey1)); // add two segments, one we don't want to send on reset, should only be cleared, other one will be migrated. From 9ac6c25619c3e4cec846aa47d6cb0240e6dc80d4 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 10 Jul 2024 16:45:45 -0300 Subject: [PATCH 003/146] Add largeSegments cache in client-side InLocal storage --- src/storages/KeyBuilderCS.ts | 26 ++++++++++++++++++- .../inLocalStorage/MySegmentsCacheInLocal.ts | 6 ++--- src/storages/inLocalStorage/index.ts | 12 ++++++--- src/storages/types.ts | 4 ++- 4 files changed, 39 insertions(+), 9 deletions(-) diff --git a/src/storages/KeyBuilderCS.ts b/src/storages/KeyBuilderCS.ts index d0752cf5..01602249 100644 --- a/src/storages/KeyBuilderCS.ts +++ b/src/storages/KeyBuilderCS.ts @@ -1,7 +1,13 @@ import { startsWith } from '../utils/lang'; import { KeyBuilder } from './KeyBuilder'; -export class KeyBuilderCS extends KeyBuilder { +export interface MySegmentsKeyBuilder { + buildSegmentNameKey(segmentName: string): string; + extractSegmentName(builtSegmentKeyName: string): string | undefined; + extractOldSegmentKey(builtSegmentKeyName: string): string | undefined; +} + +export class KeyBuilderCS extends KeyBuilder implements MySegmentsKeyBuilder { protected readonly regexSplitsCacheKey: RegExp; protected readonly matchingKey: string; @@ -42,3 +48,21 @@ export class KeyBuilderCS extends KeyBuilder { return this.regexSplitsCacheKey.test(key); } } + +export function myLargeSegmentsKeyBuilder(prefix: string, matchingKey: string): MySegmentsKeyBuilder { + return { + buildSegmentNameKey(segmentName: string) { + return `${prefix}.${matchingKey}.largeSegment.${segmentName}`; + }, + + extractSegmentName(builtSegmentKeyName: string) { + const p = `${prefix}.${matchingKey}.largeSegment.`; + + if (startsWith(builtSegmentKeyName, p)) return builtSegmentKeyName.substr(p.length); + }, + + extractOldSegmentKey() { + return undefined; + } + }; +} diff --git a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts index 849a344b..3b5085be 100644 --- a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts @@ -1,14 +1,14 @@ import { ILogger } from '../../logger/types'; import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync'; -import { KeyBuilderCS } from '../KeyBuilderCS'; +import type { MySegmentsKeyBuilder } from '../KeyBuilderCS'; import { LOG_PREFIX, DEFINED } from './constants'; export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { - private readonly keys: KeyBuilderCS; + private readonly keys: MySegmentsKeyBuilder; private readonly log: ILogger; - constructor(log: ILogger, keys: KeyBuilderCS) { + constructor(log: ILogger, keys: MySegmentsKeyBuilder) { super(); this.log = log; this.keys = keys; diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index 7dca8a24..63c14f3b 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -3,7 +3,7 @@ import { ImpressionCountsCacheInMemory } from '../inMemory/ImpressionCountsCache import { EventsCacheInMemory } from '../inMemory/EventsCacheInMemory'; import { IStorageFactoryParams, IStorageSync, IStorageSyncFactory } from '../types'; import { 
validatePrefix } from '../KeyBuilder'; -import { KeyBuilderCS } from '../KeyBuilderCS'; +import { KeyBuilderCS, myLargeSegmentsKeyBuilder } from '../KeyBuilderCS'; import { isLocalStorageAvailable } from '../../utils/env/isLocalStorageAvailable'; import { SplitsCacheInLocal } from './SplitsCacheInLocal'; import { MySegmentsCacheInLocal } from './MySegmentsCacheInLocal'; @@ -38,15 +38,17 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn const { settings, settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; const matchingKey = getMatching(settings.core.key); - const keys = new KeyBuilderCS(prefix, matchingKey as string); + const keys = new KeyBuilderCS(prefix, matchingKey); const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; const splits = new SplitsCacheInLocal(settings, keys, expirationTimestamp); const segments = new MySegmentsCacheInLocal(log, keys); + const largeSegments = new MySegmentsCacheInLocal(log, myLargeSegmentsKeyBuilder(prefix, matchingKey)); return { splits, segments, + largeSegments, impressions: new ImpressionsCacheInMemory(impressionsQueueSize), impressionCounts: impressionsMode !== DEBUG ? new ImpressionCountsCacheInMemory() : undefined, events: new EventsCacheInMemory(eventsQueueSize), @@ -56,6 +58,7 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn destroy() { this.splits = new SplitsCacheInMemory(__splitFiltersValidation); this.segments = new MySegmentsCacheInMemory(); + this.largeSegments = new MySegmentsCacheInMemory(); this.impressions.clear(); this.impressionCounts && this.impressionCounts.clear(); this.events.clear(); @@ -64,11 +67,11 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn // When using shared instanciation with MEMORY we reuse everything but segments (they are customer per key). 
shared(matchingKey: string) { - const childKeysBuilder = new KeyBuilderCS(prefix, matchingKey); return { splits: this.splits, - segments: new MySegmentsCacheInLocal(log, childKeysBuilder), + segments: new MySegmentsCacheInLocal(log, new KeyBuilderCS(prefix, matchingKey)), + largeSegments: new MySegmentsCacheInLocal(log, myLargeSegmentsKeyBuilder(prefix, matchingKey)), impressions: this.impressions, impressionCounts: this.impressionCounts, events: this.events, @@ -77,6 +80,7 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn destroy() { this.splits = new SplitsCacheInMemory(__splitFiltersValidation); this.segments = new MySegmentsCacheInMemory(); + this.largeSegments = new MySegmentsCacheInMemory(); } }; }, diff --git a/src/storages/types.ts b/src/storages/types.ts index 3fa7d244..61602e91 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -477,7 +477,9 @@ export interface IStorageSync extends IStorageBase< IEventsCacheSync, ITelemetryCacheSync, IUniqueKeysCacheSync -> { } +> { + largeSegments?: ISegmentsCacheSync, +} export interface IStorageAsync extends IStorageBase< ISplitsCacheAsync, From 162b092883155b7739b6c6104baa58594c2f62c1 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 10 Jul 2024 17:03:53 -0300 Subject: [PATCH 004/146] Unit tests --- .../__tests__/MySegmentsCacheInLocal.spec.ts | 48 +++++++++++-------- 1 file changed, 29 insertions(+), 19 deletions(-) diff --git a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts index f61985e0..c581e716 100644 --- a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts @@ -1,26 +1,36 @@ import { MySegmentsCacheInLocal } from '../MySegmentsCacheInLocal'; -import { KeyBuilderCS } from '../../KeyBuilderCS'; +import { KeyBuilderCS, myLargeSegmentsKeyBuilder } from '../../KeyBuilderCS'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; test('SEGMENT CACHE / in LocalStorage', () => { - const keys = new KeyBuilderCS('SPLITIO', 'user'); - const cache = new MySegmentsCacheInLocal(loggerMock, keys); - - cache.clear(); - - cache.addToSegment('mocked-segment'); - cache.addToSegment('mocked-segment-2'); - - expect(cache.isInSegment('mocked-segment')).toBe(true); - expect(cache.getRegisteredSegments()).toEqual(['mocked-segment', 'mocked-segment-2']); - expect(cache.getKeysCount()).toBe(1); - - cache.removeFromSegment('mocked-segment'); - - expect(cache.isInSegment('mocked-segment')).toBe(false); - expect(cache.getRegisteredSegments()).toEqual(['mocked-segment-2']); - expect(cache.getKeysCount()).toBe(1); - + const caches = [ + new MySegmentsCacheInLocal(loggerMock, new KeyBuilderCS('SPLITIO', 'user')), + new MySegmentsCacheInLocal(loggerMock, myLargeSegmentsKeyBuilder('SPLITIO', 'user')) + ]; + + caches.forEach(cache => { + cache.clear(); + + cache.addToSegment('mocked-segment'); + cache.addToSegment('mocked-segment-2'); + + expect(cache.isInSegment('mocked-segment')).toBe(true); + expect(cache.getRegisteredSegments()).toEqual(['mocked-segment', 'mocked-segment-2']); + expect(cache.getKeysCount()).toBe(1); + }); + + caches.forEach(cache => { + cache.removeFromSegment('mocked-segment'); + + expect(cache.isInSegment('mocked-segment')).toBe(false); + expect(cache.getRegisteredSegments()).toEqual(['mocked-segment-2']); + expect(cache.getKeysCount()).toBe(1); + }); + + 
expect(localStorage.getItem('SPLITIO.user.segment.mocked-segment-2')).toBe('1'); + expect(localStorage.getItem('SPLITIO.user.segment.mocked-segment')).toBe(null); + expect(localStorage.getItem('SPLITIO.user.largeSegment.mocked-segment-2')).toBe('1'); + expect(localStorage.getItem('SPLITIO.user.largeSegment.mocked-segment')).toBe(null); }); // @BREAKING: REMOVE when removing this backwards compatibility. From 764bb495b9056ca6429867460584fe841817188b Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 10 Jul 2024 17:39:28 -0300 Subject: [PATCH 005/146] Add IN_LARGE_SEGMENT matcher --- src/dtos/types.ts | 8 +++++- .../__tests__/segment/client_side.spec.ts | 25 +++++++++++++++++++ src/evaluator/matchers/index.ts | 2 ++ src/evaluator/matchers/large_segment.ts | 18 +++++++++++++ src/evaluator/matchers/matcherTypes.ts | 1 + src/evaluator/matchersTransform/index.ts | 2 +- 6 files changed, 54 insertions(+), 2 deletions(-) create mode 100644 src/evaluator/matchers/large_segment.ts diff --git a/src/dtos/types.ts b/src/dtos/types.ts index efbf0acc..351598cf 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -61,6 +61,11 @@ interface IInSegmentMatcher extends ISplitMatcherBase { userDefinedSegmentMatcherData: IInSegmentMatcherData } +interface IInLargeSegmentMatcher extends ISplitMatcherBase { + matcherType: 'IN_LARGE_SEGMENT', + userDefinedSegmentMatcherData: IInSegmentMatcherData +} + interface IWhitelistMatcher extends ISplitMatcherBase { matcherType: 'WHITELIST', whitelistMatcherData: IWhitelistMatcherData @@ -165,7 +170,8 @@ interface IInListSemverMatcher extends ISplitMatcherBase { export type ISplitMatcher = IAllKeysMatcher | IInSegmentMatcher | IWhitelistMatcher | IEqualToMatcher | IGreaterThanOrEqualToMatcher | ILessThanOrEqualToMatcher | IBetweenMatcher | IEqualToSetMatcher | IContainsAnyOfSetMatcher | IContainsAllOfSetMatcher | IPartOfSetMatcher | IStartsWithMatcher | IEndsWithMatcher | IContainsStringMatcher | IInSplitTreatmentMatcher | IEqualToBooleanMatcher | IMatchesStringMatcher | - IEqualToSemverMatcher | IGreaterThanOrEqualToSemverMatcher | ILessThanOrEqualToSemverMatcher | IBetweenSemverMatcher | IInListSemverMatcher + IEqualToSemverMatcher | IGreaterThanOrEqualToSemverMatcher | ILessThanOrEqualToSemverMatcher | IBetweenSemverMatcher | IInListSemverMatcher | + IInLargeSegmentMatcher /** Split object */ export interface ISplitPartition { diff --git a/src/evaluator/matchers/__tests__/segment/client_side.spec.ts b/src/evaluator/matchers/__tests__/segment/client_side.spec.ts index 3bace2ca..5e192829 100644 --- a/src/evaluator/matchers/__tests__/segment/client_side.spec.ts +++ b/src/evaluator/matchers/__tests__/segment/client_side.spec.ts @@ -32,3 +32,28 @@ test('MATCHER IN_SEGMENT / should return true ONLY when the segment is defined i expect(await matcherTrue()).toBe(true); // segment found in mySegments list expect(await matcherFalse()).toBe(false); // segment not found in mySegments list }); + +test('MATCHER IN_LARGE_SEGMENT / should return true ONLY when the segment is defined inside the segment storage', async function () { + const segment = 'employees'; + + const matcherTrue = matcherFactory(loggerMock, { + type: matcherTypes.IN_LARGE_SEGMENT, + value: segment + } as IMatcherDto, { + largeSegments: { + isInSegment(segmentName) { + return segment === segmentName; + } + } + } as IStorageSync) as IMatcher; + + const matcherFalse = matcherFactory(loggerMock, { + type: matcherTypes.IN_LARGE_SEGMENT, + value: segment + } as IMatcherDto, { + largeSegments: undefined + } as 
IStorageSync) as IMatcher; + + expect(await matcherTrue()).toBe(true); // large segment found in mySegments list + expect(await matcherFalse()).toBe(false); // large segment storage is not defined +}); diff --git a/src/evaluator/matchers/index.ts b/src/evaluator/matchers/index.ts index b110fc17..d50c38dd 100644 --- a/src/evaluator/matchers/index.ts +++ b/src/evaluator/matchers/index.ts @@ -1,5 +1,6 @@ import { allMatcherContext } from './all'; import { segmentMatcherContext } from './segment'; +import { largeSegmentMatcherContext } from './large_segment'; import { whitelistMatcherContext } from './whitelist'; import { equalToMatcherContext } from './eq'; import { greaterThanEqualMatcherContext } from './gte'; @@ -48,6 +49,7 @@ const matchers = [ lessThanEqualToSemverMatcherContext, // LESS_THAN_OR_EQUAL_TO_SEMVER: 20 betweenSemverMatcherContext, // BETWEEN_SEMVER: 21 inListSemverMatcherContext, // IN_LIST_SEMVER: 22 + largeSegmentMatcherContext, // IN_LARGE_SEGMENT: 23 ]; /** diff --git a/src/evaluator/matchers/large_segment.ts b/src/evaluator/matchers/large_segment.ts new file mode 100644 index 00000000..408fd5da --- /dev/null +++ b/src/evaluator/matchers/large_segment.ts @@ -0,0 +1,18 @@ +import { MaybeThenable } from '../../dtos/types'; +import { ISegmentsCacheBase } from '../../storages/types'; +import { thenable } from '../../utils/promise/thenable'; + +export function largeSegmentMatcherContext(largeSegmentName: string, storage: { largeSegments?: ISegmentsCacheBase }) { + + return function largeSegmentMatcher(key: string): MaybeThenable { + const isInLargeSegment = storage.largeSegments ? storage.largeSegments.isInSegment(largeSegmentName, key) : false; + + if (thenable(isInLargeSegment)) { + isInLargeSegment.then(result => { + return result; + }); + } + + return isInLargeSegment; + }; +} diff --git a/src/evaluator/matchers/matcherTypes.ts b/src/evaluator/matchers/matcherTypes.ts index 469c7a43..f09d50bf 100644 --- a/src/evaluator/matchers/matcherTypes.ts +++ b/src/evaluator/matchers/matcherTypes.ts @@ -22,6 +22,7 @@ export const matcherTypes: Record = { LESS_THAN_OR_EQUAL_TO_SEMVER: 20, BETWEEN_SEMVER: 21, IN_LIST_SEMVER: 22, + IN_LARGE_SEGMENT: 23, }; export const matcherDataTypes = { diff --git a/src/evaluator/matchersTransform/index.ts b/src/evaluator/matchersTransform/index.ts index 23c4d538..877f368d 100644 --- a/src/evaluator/matchersTransform/index.ts +++ b/src/evaluator/matchersTransform/index.ts @@ -33,7 +33,7 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { let dataType = matcherDataTypes.STRING; let value = undefined; - if (type === matcherTypes.IN_SEGMENT) { + if (type === matcherTypes.IN_SEGMENT || type === matcherTypes.IN_LARGE_SEGMENT) { value = segmentTransform(userDefinedSegmentMatcherData as IInSegmentMatcherData); } else if (type === matcherTypes.EQUAL_TO) { value = numericTransform(unaryNumericMatcherData as IUnaryNumericMatcherData); From b56f142d6a75826caa9d57f5cfb2295a2769cd9a Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 11 Jul 2024 13:02:38 -0300 Subject: [PATCH 006/146] Added new configuration options --- src/types.ts | 25 ++++++++++++++++++- .../__tests__/index.spec.ts | 3 ++- .../__tests__/settings.mocks.ts | 7 ++++-- src/utils/settingsValidation/index.ts | 11 +++++--- src/utils/settingsValidation/types.ts | 2 +- 5 files changed, 40 insertions(+), 8 deletions(-) diff --git a/src/types.ts b/src/types.ts index 475b1822..7f7c709d 100644 --- a/src/types.ts +++ b/src/types.ts @@ -86,6 +86,7 @@ export interface 
ISettings { metricsRefreshRate?: number, telemetryRefreshRate: number, segmentsRefreshRate: number, + largeSegmentsRefreshRate: number, offlineRefreshRate: number, eventsPushRate: number, eventsQueueSize: number, @@ -95,7 +96,8 @@ export interface ISettings { readyTimeout: number, requestTimeoutBeforeReady: number, retriesOnFailureBeforeReady: number, - eventsFirstPushWindow: number + eventsFirstPushWindow: number, + waitForLargeSegments: boolean }, readonly storage: IStorageSyncFactory | IStorageAsyncFactory, readonly integrations: Array<{ @@ -119,6 +121,7 @@ export interface ISettings { __splitFiltersValidation: ISplitFiltersValidation, localhostMode?: SplitIO.LocalhostFactory, enabled: boolean, + largeSegmentsEnabled: boolean, flagSpecVersion: string }, readonly runtime: { @@ -813,6 +816,12 @@ export namespace SplitIO { * @default 10 */ eventsFirstPushWindow?: number, + /** + * Whether the SDK should wait for large segments to be ready before emitting SDK_READY event. + * @property {number} waitForLargeSegments + * @default true + */ + waitForLargeSegments?: boolean }, /** * SDK scheduler settings. @@ -857,6 +866,12 @@ export namespace SplitIO { * @default 60 */ segmentsRefreshRate?: number, + /** + * The SDK polls Split servers for changes to large segment definitions. This parameter controls this polling period in seconds. + * @property {number} largeSegmentsRefreshRate + * @default 60 + */ + largeSegmentsRefreshRate?: number, /** * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. * @property {number} eventsPushRate @@ -929,6 +944,14 @@ export namespace SplitIO { * @property {Object} urls */ urls?: UrlSettings, + sync?: ISharedSettings['sync'] & { + /** + * Enables synchronization of large segments. + * @property {boolean} largeSegmentsEnabled + * @default false + */ + largeSegmentsEnabled?: boolean + } } /** * Settings interface for SDK instances created on NodeJS. 
diff --git a/src/utils/settingsValidation/__tests__/index.spec.ts b/src/utils/settingsValidation/__tests__/index.spec.ts index 8e0238c4..358b278e 100644 --- a/src/utils/settingsValidation/__tests__/index.spec.ts +++ b/src/utils/settingsValidation/__tests__/index.spec.ts @@ -14,7 +14,8 @@ const minimalSettingsParams = { requestTimeoutBeforeReady: 5, retriesOnFailureBeforeReady: 1, readyTimeout: 10, - eventsFirstPushWindow: 10 + eventsFirstPushWindow: 10, + waitForLargeSegments: false }, version: 'javascript-test', }, diff --git a/src/utils/settingsValidation/__tests__/settings.mocks.ts b/src/utils/settingsValidation/__tests__/settings.mocks.ts index a3057515..4efd8a85 100644 --- a/src/utils/settingsValidation/__tests__/settings.mocks.ts +++ b/src/utils/settingsValidation/__tests__/settings.mocks.ts @@ -52,6 +52,7 @@ export const fullSettings: ISettings = { impressionsRefreshRate: 1, telemetryRefreshRate: 1, segmentsRefreshRate: 1, + largeSegmentsRefreshRate: 1, offlineRefreshRate: 1, eventsPushRate: 1, eventsQueueSize: 1, @@ -62,7 +63,8 @@ export const fullSettings: ISettings = { readyTimeout: 1, requestTimeoutBeforeReady: 1, retriesOnFailureBeforeReady: 1, - eventsFirstPushWindow: 1 + eventsFirstPushWindow: 1, + waitForLargeSegments: false }, features: 'path/to/file', storage: InMemoryStorageCSFactory, @@ -80,7 +82,8 @@ export const fullSettings: ISettings = { groupedFilters: { bySet: [], byName: [], byPrefix: [] }, }, enabled: true, - flagSpecVersion: '1.1' + flagSpecVersion: '1.1', + largeSegmentsEnabled: false }, version: 'jest', runtime: { diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index d3dc7662..b3114440 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ -32,6 +32,8 @@ export const base = { featuresRefreshRate: 60, // fetch segments updates each 60 sec segmentsRefreshRate: 60, + // fetch large segments updates each 60 sec + largeSegmentsRefreshRate: 60, // publish telemetry stats each 3600 secs (1 hour) telemetryRefreshRate: 3600, // publish evaluations each 300 sec (default value for OPTIMIZED impressions mode) @@ -85,7 +87,8 @@ export const base = { impressionsMode: OPTIMIZED, localhostMode: undefined, enabled: true, - flagSpecVersion: FLAG_SPEC_VERSION + flagSpecVersion: FLAG_SPEC_VERSION, + largeSegmentsEnabled: false }, // Logger @@ -132,6 +135,7 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV const { scheduler, startup } = withDefaults; scheduler.featuresRefreshRate = fromSecondsToMillis(scheduler.featuresRefreshRate); scheduler.segmentsRefreshRate = fromSecondsToMillis(scheduler.segmentsRefreshRate); + scheduler.largeSegmentsRefreshRate = fromSecondsToMillis(scheduler.largeSegmentsRefreshRate); scheduler.offlineRefreshRate = fromSecondsToMillis(scheduler.offlineRefreshRate); scheduler.eventsPushRate = fromSecondsToMillis(scheduler.eventsPushRate); scheduler.telemetryRefreshRate = fromSecondsToMillis(validateMinValue('telemetryRefreshRate', scheduler.telemetryRefreshRate, 60)); @@ -209,11 +213,12 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV const splitFiltersValidation = validateSplitFilters(log, sync.splitFilters, withDefaults.mode); sync.splitFilters = splitFiltersValidation.validFilters; sync.__splitFiltersValidation = splitFiltersValidation; - sync.flagSpecVersion = flagSpec ? flagSpec(withDefaults) : FLAG_SPEC_VERSION; + // ensure a valid flag spec version + sync.flagSpecVersion = flagSpec ? 
flagSpec(withDefaults) : FLAG_SPEC_VERSION; // ensure a valid user consent value // @ts-ignore, modify readonly prop - withDefaults.userConsent = consent(withDefaults); + withDefaults.userConsent = consent ? consent(withDefaults) : undefined; return withDefaults; } diff --git a/src/utils/settingsValidation/types.ts b/src/utils/settingsValidation/types.ts index 16d71716..dcc0d8f8 100644 --- a/src/utils/settingsValidation/types.ts +++ b/src/utils/settingsValidation/types.ts @@ -25,7 +25,7 @@ export interface ISettingsValidationParams { /** Localhost mode validator (`settings.sync.localhostMode`) */ localhost?: (settings: ISettings) => ISettings['sync']['localhostMode'], /** User consent validator (`settings.userConsent`) */ - consent: (settings: ISettings) => ISettings['userConsent'], + consent?: (settings: ISettings) => ISettings['userConsent'], /** Flag spec version validation. Configurable by the JS Synchronizer but not by the SDKs */ flagSpec?: (settings: ISettings) => ISettings['sync']['flagSpecVersion'] } From 6827eb649bfa2857ef200cb880e894cab296ffa1 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 11 Jul 2024 14:53:54 -0300 Subject: [PATCH 007/146] Add new endpoint for myLargeSegments --- src/services/__tests__/splitApi.spec.ts | 7 ++++++- src/services/splitApi.ts | 13 ++++++++++++- src/services/types.ts | 1 + src/sync/submitters/types.ts | 3 ++- src/utils/constants/index.ts | 1 + 5 files changed, 22 insertions(+), 3 deletions(-) diff --git a/src/services/__tests__/splitApi.spec.ts b/src/services/__tests__/splitApi.spec.ts index 07d6dec4..d5550f93 100644 --- a/src/services/__tests__/splitApi.spec.ts +++ b/src/services/__tests__/splitApi.spec.ts @@ -60,7 +60,12 @@ describe('splitApi', () => { splitApi.postMetricsUsage('fake-body'); assertHeaders(settings, fetchMock.mock.calls[8][1].headers); - expect(telemetryTrackerMock.trackHttp).toBeCalledTimes(9); + splitApi.fetchMyLargeSegments('userKey'); + [url, { headers }] = fetchMock.mock.calls[9]; + assertHeaders(settings, headers); + expect(url).toBe('sdk/myLargeSegments/userKey'); + + expect(telemetryTrackerMock.trackHttp).toBeCalledTimes(10); telemetryTrackerMock.trackHttp.mockClear(); fetchMock.mockClear(); diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index 9e2223ea..8d5b7b79 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -4,7 +4,7 @@ import { splitHttpClientFactory } from './splitHttpClient'; import { ISplitApi } from './types'; import { objectAssign } from '../utils/lang/objectAssign'; import { ITelemetryTracker } from '../trackers/types'; -import { SPLITS, IMPRESSIONS, IMPRESSIONS_COUNT, EVENTS, TELEMETRY, TOKEN, SEGMENT, MY_SEGMENT } from '../utils/constants'; +import { SPLITS, IMPRESSIONS, IMPRESSIONS_COUNT, EVENTS, TELEMETRY, TOKEN, SEGMENT, MY_SEGMENT, MY_LARGE_SEGMENT } from '../utils/constants'; import { ERROR_TOO_MANY_SETS } from '../logger/constants'; const noCacheHeaderOptions = { headers: { 'Cache-Control': 'no-cache' } }; @@ -78,6 +78,17 @@ export function splitApiFactory( return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MY_SEGMENT)); }, + fetchMyLargeSegments(userMatchingKey: string, noCache?: boolean) { + /** + * URI encoding of user keys in order to: + * - avoid 400 responses (due to URI malformed). E.g.: '/api/mySegments/%' + * - avoid 404 responses. E.g.: '/api/mySegments/foo/bar' + * - match user keys with special characters. 
E.g.: 'foo%bar', 'foo/bar' + */ + const url = `${urls.sdk}/myLargeSegments/${encodeURIComponent(userMatchingKey)}`; + return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MY_LARGE_SEGMENT)); + }, + /** * Post events. * diff --git a/src/services/types.ts b/src/services/types.ts index 116ccec5..a5dfde2a 100644 --- a/src/services/types.ts +++ b/src/services/types.ts @@ -62,6 +62,7 @@ export interface ISplitApi { fetchSplitChanges: IFetchSplitChanges fetchSegmentChanges: IFetchSegmentChanges fetchMySegments: IFetchMySegments + fetchMyLargeSegments: IFetchMySegments postEventsBulk: IPostEventsBulk postUniqueKeysBulkCs: IPostUniqueKeysBulkCs postUniqueKeysBulkSs: IPostUniqueKeysBulkSs diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index 440f466a..210cf603 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -113,7 +113,8 @@ export type TELEMETRY = 'te'; export type TOKEN = 'to'; export type SEGMENT = 'se'; export type MY_SEGMENT = 'ms'; -export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MY_SEGMENT; +export type MY_LARGE_SEGMENT = 'mls'; +export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MY_SEGMENT | MY_LARGE_SEGMENT; export type LastSync = Partial> export type HttpErrors = Partial> diff --git a/src/utils/constants/index.ts b/src/utils/constants/index.ts index d3f4eb6f..cff201b1 100644 --- a/src/utils/constants/index.ts +++ b/src/utils/constants/index.ts @@ -76,6 +76,7 @@ export const TELEMETRY = 'te'; export const TOKEN = 'to'; export const SEGMENT = 'se'; export const MY_SEGMENT = 'ms'; +export const MY_LARGE_SEGMENT = 'mls'; export const TREATMENT = 't'; export const TREATMENTS = 'ts'; From 3c7d05f070dc0a47b6a7819e98297acbffdecd2a Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 11 Jul 2024 15:49:25 -0300 Subject: [PATCH 008/146] Update Readiness Manager to handle largeSegments sync if largeSegmentsEnabled and waitForLargeSegments are true --- .../__tests__/readinessManager.spec.ts | 48 +++++++++++++------ .../__tests__/sdkReadinessManager.spec.ts | 23 ++++----- src/readiness/readinessManager.ts | 16 +++++-- src/readiness/sdkReadinessManager.ts | 14 +++--- src/readiness/types.ts | 5 +- src/sdkClient/sdkClientMethodCS.ts | 4 +- src/sdkClient/sdkClientMethodCSWithTT.ts | 4 +- src/sdkFactory/index.ts | 2 +- .../__tests__/splitChangesUpdater.spec.ts | 4 +- 9 files changed, 73 insertions(+), 47 deletions(-) diff --git a/src/readiness/__tests__/readinessManager.spec.ts b/src/readiness/__tests__/readinessManager.spec.ts index 6f2ade74..601f849f 100644 --- a/src/readiness/__tests__/readinessManager.spec.ts +++ b/src/readiness/__tests__/readinessManager.spec.ts @@ -2,8 +2,26 @@ import { readinessManagerFactory } from '../readinessManager'; import { EventEmitter } from '../../utils/MinEvents'; import { IReadinessManager } from '../types'; import { SDK_READY, SDK_UPDATE, SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_READY_FROM_CACHE, SDK_SPLITS_CACHE_LOADED, SDK_READY_TIMED_OUT } from '../constants'; +import { ISettings } from '../../types'; + +const settings = { + startup: { + readyTimeout: 0, + waitForLargeSegments: false + }, + sync: { + largeSegmentEnabled: false + } +} as unknown as ISettings; + +const settingsWithTimeout = { + ...settings, + startup: { + ...settings.startup, + readyTimeout: 50 + } +}; -const timeoutMs = 100; const statusFlagsCount = 5; function 
assertInitialStatus(readinessManager: IReadinessManager) { @@ -17,7 +35,7 @@ function assertInitialStatus(readinessManager: IReadinessManager) { test('READINESS MANAGER / Share splits but segments (without timeout enabled)', (done) => { expect.assertions(2 + statusFlagsCount * 2); - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); const readinessManager2 = readinessManager.shared(); assertInitialStatus(readinessManager); // all status flags must be false @@ -50,7 +68,7 @@ test('READINESS MANAGER / Share splits but segments (without timeout enabled)', }); test('READINESS MANAGER / Ready event should be fired once', () => { - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); let counter = 0; readinessManager.gate.on(SDK_READY, () => { @@ -69,7 +87,7 @@ test('READINESS MANAGER / Ready event should be fired once', () => { }); test('READINESS MANAGER / Ready from cache event should be fired once', (done) => { - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); let counter = 0; readinessManager.gate.on(SDK_READY_FROM_CACHE, () => { @@ -94,7 +112,7 @@ test('READINESS MANAGER / Ready from cache event should be fired once', (done) = }); test('READINESS MANAGER / Update event should be fired after the Ready event', () => { - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); let isReady = false; let counter = 0; @@ -121,7 +139,7 @@ test('READINESS MANAGER / Update event should be fired after the Ready event', ( test('READINESS MANAGER / Segment updates should not be propagated', (done) => { let updateCounter = 0; - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); const readinessManager2 = readinessManager.shared(); readinessManager2.gate.on(SDK_UPDATE, () => { @@ -149,7 +167,7 @@ describe('READINESS MANAGER / Timeout ready event', () => { beforeEach(() => { // Schedule timeout to be fired before SDK_READY - readinessManager = readinessManagerFactory(EventEmitter, 10); + readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); timeoutCounter = 0; readinessManager.gate.on(SDK_READY_TIMED_OUT, () => { @@ -160,7 +178,7 @@ describe('READINESS MANAGER / Timeout ready event', () => { setTimeout(() => { readinessManager.splits.emit(SDK_SPLITS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - }, 20); + }, settingsWithTimeout.startup.readyTimeout + 20); }); test('should be fired once', (done) => { @@ -194,7 +212,7 @@ test('READINESS MANAGER / Cancel timeout if ready fired', (done) => { let sdkReadyCalled = false; let sdkReadyTimedoutCalled = false; - const readinessManager = readinessManagerFactory(EventEmitter, timeoutMs); + const readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); readinessManager.gate.on(SDK_READY_TIMED_OUT, () => { sdkReadyTimedoutCalled = true; }); readinessManager.gate.once(SDK_READY, () => { sdkReadyCalled = true; }); @@ -204,16 +222,16 @@ test('READINESS MANAGER / Cancel timeout if ready fired', (done) => { expect(sdkReadyTimedoutCalled).toBeFalsy(); expect(sdkReadyCalled).toBeTruthy(); done(); - }, timeoutMs * 3); + }, settingsWithTimeout.startup.readyTimeout * 3); 
setTimeout(() => { readinessManager.splits.emit(SDK_SPLITS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - }, timeoutMs * 0.8); + }, settingsWithTimeout.startup.readyTimeout * 0.8); }); test('READINESS MANAGER / Destroy after it was ready but before timedout', () => { - const readinessManager = readinessManagerFactory(EventEmitter, timeoutMs); + const readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); let counter = 0; @@ -238,7 +256,7 @@ test('READINESS MANAGER / Destroy after it was ready but before timedout', () => }); test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { - const readinessManager = readinessManagerFactory(EventEmitter, timeoutMs); + const readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); readinessManager.gate.on(SDK_READY, () => { throw new Error('SDK_READY should have not been emitted'); @@ -250,7 +268,7 @@ test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { setTimeout(() => { readinessManager.destroy(); // Destroy the gate, removing all the listeners and clearing the ready timeout. - }, timeoutMs * 0.5); + }, settingsWithTimeout.startup.readyTimeout * 0.5); setTimeout(() => { readinessManager.splits.emit(SDK_SPLITS_ARRIVED); @@ -258,6 +276,6 @@ test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { expect('Calling destroy should have removed the readyTimeout and the test should end now.'); done(); - }, timeoutMs * 1.5); + }, settingsWithTimeout.startup.readyTimeout * 1.5); }); diff --git a/src/readiness/__tests__/sdkReadinessManager.spec.ts b/src/readiness/__tests__/sdkReadinessManager.spec.ts index 9c407052..3bb43192 100644 --- a/src/readiness/__tests__/sdkReadinessManager.spec.ts +++ b/src/readiness/__tests__/sdkReadinessManager.spec.ts @@ -5,6 +5,7 @@ import { SDK_READY, SDK_READY_FROM_CACHE, SDK_READY_TIMED_OUT, SDK_UPDATE } from import { sdkReadinessManagerFactory } from '../sdkReadinessManager'; import { IReadinessManager } from '../types'; import { ERROR_CLIENT_LISTENER, CLIENT_READY_FROM_CACHE, CLIENT_READY, CLIENT_NO_LISTENER } from '../../logger/constants'; +import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; const EventEmitterMock = jest.fn(() => ({ on: jest.fn(), @@ -40,7 +41,7 @@ describe('SDK Readiness Manager - Event emitter', () => { test('Providing the gate object to get the SDK status interface that manages events', () => { expect(typeof sdkReadinessManagerFactory).toBe('function'); // The module exposes a function. - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); expect(typeof sdkReadinessManager).toBe('object'); // The function result contains the readiness manager and a sdkStatus object. 
const gateMock = sdkReadinessManager.readinessManager.gate; const sdkStatus = sdkReadinessManager.sdkStatus; @@ -76,7 +77,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - SDK_READY_FROM_CACHE', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const gateMock = sdkReadinessManager.readinessManager.gate; const readyFromCacheEventCB = gateMock.once.mock.calls[2][1]; @@ -86,7 +87,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - SDK_READY emits with no callbacks', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); // Get the callbacks const addListenerCB = sdkReadinessManager.readinessManager.gate.on.mock.calls[1][1]; @@ -112,7 +113,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - SDK_READY emits with callbacks', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); // Get the callbacks const addListenerCB = sdkReadinessManager.readinessManager.gate.on.mock.calls[1][1]; @@ -130,7 +131,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - If we end up removing the listeners for SDK_READY, it behaves as if it had none', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const gateMock = sdkReadinessManager.readinessManager.gate; // Get the callbacks @@ -150,7 +151,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - If we end up removing the listeners for SDK_READY, it behaves as if it had none', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const gateMock = sdkReadinessManager.readinessManager.gate; // Get the callbacks @@ -172,7 +173,7 @@ describe('SDK Readiness Manager - Event emitter', () => { test('The event callbacks should work as expected - SDK_READY emits with expected internal callbacks', () => { // the sdkReadinessManager expects more than one SDK_READY callback to not log the "No listeners" warning - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); sdkReadinessManager.incInternalReadyCbCount(); const gateMock = sdkReadinessManager.readinessManager.gate; @@ -197,7 +198,7 @@ describe('SDK Readiness Manager - Event emitter', () => { describe('SDK Readiness Manager - Ready promise', () => { test('.ready() promise behaviour for clients', async () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const ready = sdkReadinessManager.sdkStatus.ready(); expect(ready instanceof Promise).toBe(true); // It should return a promise. 
@@ -226,7 +227,7 @@ describe('SDK Readiness Manager - Ready promise', () => { // control assertion. stubs already reset. expect(testPassedCount).toBe(2); - const sdkReadinessManagerForTimedout = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManagerForTimedout = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const readyForTimeout = sdkReadinessManagerForTimedout.sdkStatus.ready(); @@ -265,7 +266,7 @@ describe('SDK Readiness Manager - Ready promise', () => { }); test('Full blown ready promise count as a callback and resolves on SDK_READY', (done) => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const readyPromise = sdkReadinessManager.sdkStatus.ready(); // Get the callback @@ -287,7 +288,7 @@ describe('SDK Readiness Manager - Ready promise', () => { }); test('.ready() rejected promises have a default onRejected handler that just logs the error', (done) => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); let readyForTimeout = sdkReadinessManager.sdkStatus.ready(); emitTimeoutEvent(sdkReadinessManager.readinessManager); // make the SDK "timed out" diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index c5ac1c35..a4a5290d 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -1,5 +1,5 @@ import { objectAssign } from '../utils/lang/objectAssign'; -import { IEventEmitter } from '../types'; +import { IEventEmitter, ISettings } from '../types'; import { SDK_SPLITS_ARRIVED, SDK_SPLITS_CACHE_LOADED, SDK_SEGMENTS_ARRIVED, SDK_READY_TIMED_OUT, SDK_READY_FROM_CACHE, SDK_UPDATE, SDK_READY } from './constants'; import { IReadinessEventEmitter, IReadinessManager, ISegmentsEventEmitter, ISplitsEventEmitter } from './types'; @@ -33,10 +33,13 @@ function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISe */ export function readinessManagerFactory( EventEmitter: new () => IEventEmitter, - readyTimeout = 0, + settings: ISettings, splits: ISplitsEventEmitter = splitsEventEmitterFactory(EventEmitter)): IReadinessManager { + const { startup: { readyTimeout, waitForLargeSegments }, sync: { largeSegmentsEnabled } } = settings; + const segments: ISegmentsEventEmitter = segmentsEventEmitterFactory(EventEmitter); + const largeSegments = largeSegmentsEnabled && waitForLargeSegments ? 
segmentsEventEmitterFactory(EventEmitter) : undefined; const gate: IReadinessEventEmitter = new EventEmitter(); // emit SDK_READY_FROM_CACHE @@ -62,6 +65,7 @@ export function readinessManagerFactory( let isReady = false; splits.on(SDK_SPLITS_ARRIVED, checkIsReadyOrUpdate); segments.on(SDK_SEGMENTS_ARRIVED, checkIsReadyOrUpdate); + if (largeSegments) largeSegments.on(SDK_SEGMENTS_ARRIVED, checkIsReadyOrUpdate); let isDestroyed = false; @@ -87,7 +91,7 @@ export function readinessManagerFactory( setTimeout(() => { throw e; }, 0); } } else { - if (splits.splitsArrived && segments.segmentsArrived) { + if (splits.splitsArrived && segments.segmentsArrived && (!largeSegments || largeSegments.segmentsArrived)) { clearTimeout(readyTimeoutId); isReady = true; try { @@ -105,11 +109,12 @@ export function readinessManagerFactory( return { splits, segments, + largeSegments, gate, - shared(readyTimeout = 0) { + shared() { refCount++; - return readinessManagerFactory(EventEmitter, readyTimeout, splits); + return readinessManagerFactory(EventEmitter, settings, splits); }, // @TODO review/remove next methods when non-recoverable errors are reworked @@ -123,6 +128,7 @@ export function readinessManagerFactory( isDestroyed = true; segments.removeAllListeners(); + if (largeSegments) largeSegments.removeAllListeners(); gate.removeAllListeners(); clearTimeout(readyTimeoutId); diff --git a/src/readiness/sdkReadinessManager.ts b/src/readiness/sdkReadinessManager.ts index c137e040..be12a6e1 100644 --- a/src/readiness/sdkReadinessManager.ts +++ b/src/readiness/sdkReadinessManager.ts @@ -2,9 +2,8 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { promiseWrapper } from '../utils/promise/wrapper'; import { readinessManagerFactory } from './readinessManager'; import { ISdkReadinessManager } from './types'; -import { IEventEmitter } from '../types'; +import { IEventEmitter, ISettings } from '../types'; import { SDK_READY, SDK_READY_TIMED_OUT, SDK_READY_FROM_CACHE, SDK_UPDATE } from './constants'; -import { ILogger } from '../logger/types'; import { ERROR_CLIENT_LISTENER, CLIENT_READY_FROM_CACHE, CLIENT_READY, CLIENT_NO_LISTENER } from '../logger/constants'; const NEW_LISTENER_EVENT = 'newListener'; @@ -18,10 +17,11 @@ const REMOVE_LISTENER_EVENT = 'removeListener'; * @param readinessManager optional readinessManager to use. 
only used internally for `shared` method */ export function sdkReadinessManagerFactory( - log: ILogger, EventEmitter: new () => IEventEmitter, - readyTimeout = 0, - readinessManager = readinessManagerFactory(EventEmitter, readyTimeout)): ISdkReadinessManager { + settings: ISettings, + readinessManager = readinessManagerFactory(EventEmitter, settings)): ISdkReadinessManager { + + const log = settings.log; /** Ready callback warning */ let internalReadyCbCount = 0; @@ -72,8 +72,8 @@ export function sdkReadinessManagerFactory( return { readinessManager, - shared(readyTimeout = 0) { - return sdkReadinessManagerFactory(log, EventEmitter, readyTimeout, readinessManager.shared(readyTimeout)); + shared() { + return sdkReadinessManagerFactory(EventEmitter, settings, readinessManager.shared()); }, incInternalReadyCbCount() { diff --git a/src/readiness/types.ts b/src/readiness/types.ts index d126b5ec..c5cd9b0f 100644 --- a/src/readiness/types.ts +++ b/src/readiness/types.ts @@ -45,6 +45,7 @@ export interface IReadinessManager { /** Event emitters */ splits: ISplitsEventEmitter, segments: ISegmentsEventEmitter, + largeSegments?: ISegmentsEventEmitter, // Undefined if largeSegmentsEnabled or waitForLargeSegments are false gate: IReadinessEventEmitter, /** Readiness status */ @@ -59,7 +60,7 @@ export interface IReadinessManager { destroy(): void, /** for client-side */ - shared(readyTimeout?: number): IReadinessManager, + shared(): IReadinessManager, } /** SDK readiness manager */ @@ -75,5 +76,5 @@ export interface ISdkReadinessManager { incInternalReadyCbCount(): void /** for client-side */ - shared(readyTimeout?: number): ISdkReadinessManager + shared(): ISdkReadinessManager } diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 724a5871..50822592 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -21,7 +21,7 @@ const method = 'Client instantiation'; * Therefore, clients don't have a bound TT for the track method. */ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.ICsClient { - const { storage, syncManager, sdkReadinessManager, settings: { core: { key }, startup: { readyTimeout }, log } } = params; + const { storage, syncManager, sdkReadinessManager, settings: { core: { key }, log } } = params; const mainClientInstance = clientCSDecorator( log, @@ -53,7 +53,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl if (!clientInstances[instanceId]) { const matchingKey = getMatching(validKey); - const sharedSdkReadiness = sdkReadinessManager.shared(readyTimeout); + const sharedSdkReadiness = sdkReadinessManager.shared(); const sharedStorage = storage.shared && storage.shared(matchingKey, (err) => { if (err) { sharedSdkReadiness.readinessManager.timeout(); diff --git a/src/sdkClient/sdkClientMethodCSWithTT.ts b/src/sdkClient/sdkClientMethodCSWithTT.ts index 78957ba8..79731f31 100644 --- a/src/sdkClient/sdkClientMethodCSWithTT.ts +++ b/src/sdkClient/sdkClientMethodCSWithTT.ts @@ -23,7 +23,7 @@ const method = 'Client instantiation'; * (default client) or the client method (shared clients). 
*/ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey, trafficType?: string) => SplitIO.ICsClient { - const { storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, startup: { readyTimeout }, log } } = params; + const { storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, log } } = params; const mainClientInstance = clientCSDecorator( log, @@ -63,7 +63,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl if (!clientInstances[instanceId]) { const matchingKey = getMatching(validKey); - const sharedSdkReadiness = sdkReadinessManager.shared(readyTimeout); + const sharedSdkReadiness = sdkReadinessManager.shared(); const sharedStorage = storage.shared && storage.shared(matchingKey, (err) => { if (err) { sharedSdkReadiness.readinessManager.timeout(); diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index cd15e9ef..31a9ddc5 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -32,7 +32,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. // We will just log and allow for the SDK to end up throwing an SDK_TIMEOUT event for devs to handle. validateAndTrackApiKey(log, settings.core.authorizationKey); - const sdkReadinessManager = sdkReadinessManagerFactory(log, platform.EventEmitter, settings.startup.readyTimeout); + const sdkReadinessManager = sdkReadinessManagerFactory(platform.EventEmitter, settings); const readiness = sdkReadinessManager.readinessManager; const storage = storageFactory({ diff --git a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts b/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts index faf31e44..b4dca3fe 100644 --- a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts +++ b/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts @@ -7,7 +7,7 @@ import { splitChangesFetcherFactory } from '../../fetchers/splitChangesFetcher'; import { splitChangesUpdaterFactory, parseSegments, computeSplitsMutation } from '../splitChangesUpdater'; import splitChangesMock1 from '../../../../__tests__/mocks/splitchanges.since.-1.json'; import fetchMock from '../../../../__tests__/testUtils/fetchMock'; -import { settingsSplitApi } from '../../../../utils/settingsValidation/__tests__/settings.mocks'; +import { fullSettings, settingsSplitApi } from '../../../../utils/settingsValidation/__tests__/settings.mocks'; import { EventEmitter } from '../../../../utils/MinEvents'; import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; import { telemetryTrackerFactory } from '../../../../trackers/telemetryTracker'; @@ -165,7 +165,7 @@ describe('splitChangesUpdater', () => { const segmentsCache = new SegmentsCacheInMemory(); const registerSegments = jest.spyOn(segmentsCache, 'registerSegments'); - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, fullSettings); const splitsEmitSpy = jest.spyOn(readinessManager.splits, 'emit'); let splitFiltersValidation = { queryString: null, groupedFilters: { bySet: [], byName: [], byPrefix: [] }, validFilters: [] }; From fac27f93b4b2b7a5a4016bdc2752cea6b0ef3223 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 11 Jul 2024 16:29:57 -0300 Subject: [PATCH 009/146] Add unit tests --- .../__tests__/readinessManager.spec.ts | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git 
a/src/readiness/__tests__/readinessManager.spec.ts b/src/readiness/__tests__/readinessManager.spec.ts index 601f849f..fafa4019 100644 --- a/src/readiness/__tests__/readinessManager.spec.ts +++ b/src/readiness/__tests__/readinessManager.spec.ts @@ -279,3 +279,44 @@ test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { }, settingsWithTimeout.startup.readyTimeout * 1.5); }); + +test('READINESS MANAGER / with large segments', () => { + const readinessManager = readinessManagerFactory(EventEmitter, { + startup: { readyTimeout: 0, waitForLargeSegments: false }, + sync: { largeSegmentsEnabled: true } + } as unknown as ISettings); + + expect(readinessManager.largeSegments).toBeUndefined(); + + const readinessManagerWithLargeSegments = readinessManagerFactory(EventEmitter, { + startup: { readyTimeout: 0, waitForLargeSegments: true }, + sync: { largeSegmentsEnabled: true } + } as unknown as ISettings); + + expect(readinessManagerWithLargeSegments.largeSegments).toBeDefined(); + + [readinessManager, readinessManagerWithLargeSegments].forEach(rm => { + let counter = 0; + + rm.gate.on(SDK_READY, () => { + expect(rm.isReady()).toBe(true); + counter++; + }); + + rm.splits.emit(SDK_SPLITS_ARRIVED); + rm.segments.emit(SDK_SEGMENTS_ARRIVED); + if (rm.largeSegments) { + expect(counter).toBe(0); // should not be called yet + rm.largeSegments.emit(SDK_SEGMENTS_ARRIVED); + } + expect(counter).toBe(1); // should be called + + rm.splits.emit(SDK_SPLITS_ARRIVED); + rm.segments.emit(SDK_SEGMENTS_ARRIVED); + rm.splits.emit(SDK_SPLITS_ARRIVED); + rm.segments.emit(SDK_SEGMENTS_ARRIVED); + if (rm.largeSegments) rm.largeSegments.emit(SDK_SEGMENTS_ARRIVED); + + expect(counter).toBe(1); // should be called once + }); +}); From 84a1cd3d207246c534da003b9211481cc37eefa4 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 12 Jul 2024 15:32:25 -0300 Subject: [PATCH 010/146] Add comment --- src/types.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/types.ts b/src/types.ts index 7f7c709d..f259c1c0 100644 --- a/src/types.ts +++ b/src/types.ts @@ -818,6 +818,7 @@ export namespace SplitIO { eventsFirstPushWindow?: number, /** * Whether the SDK should wait for large segments to be ready before emitting SDK_READY event. + * It only applies if largeSegmentsEnabled is true. * @property {number} waitForLargeSegments * @default true */ @@ -868,6 +869,7 @@ export namespace SplitIO { segmentsRefreshRate?: number, /** * The SDK polls Split servers for changes to large segment definitions. This parameter controls this polling period in seconds. + * It only applies if largeSegmentsEnabled is true. 
* @property {number} largeSegmentsRefreshRate * @default 60 */ From 04ac92243ea28b99d931deafc228b17b80a6bc5f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 12 Jul 2024 16:35:00 -0300 Subject: [PATCH 011/146] Refactor usesSegments method for usesMatcher --- src/storages/AbstractSplitsCacheAsync.ts | 4 +-- src/storages/AbstractSplitsCacheSync.ts | 9 +++---- src/storages/KeyBuilder.ts | 3 +++ .../inLocalStorage/SplitsCacheInLocal.ts | 25 +++++++++++++++---- .../__tests__/SplitsCacheInLocal.spec.ts | 15 +++++------ src/storages/inMemory/SplitsCacheInMemory.ts | 25 +++++++++++-------- src/storages/types.ts | 6 ++--- src/sync/__tests__/syncManagerOnline.spec.ts | 2 +- src/sync/polling/pollingManagerCS.ts | 9 ++++--- .../polling/updaters/mySegmentsUpdater.ts | 3 ++- src/sync/syncManagerOnline.ts | 6 ++--- src/utils/constants/index.ts | 1 + 12 files changed, 67 insertions(+), 41 deletions(-) diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts index 9e4e136c..4abb6e34 100644 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ b/src/storages/AbstractSplitsCacheAsync.ts @@ -22,9 +22,9 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { abstract trafficTypeExists(trafficType: string): Promise abstract clear(): Promise - // @TODO revisit segment-related methods ('usesSegments', 'getRegisteredSegments', 'registerSegments') + // @TODO revisit segment-related methods ('usesMatcher', 'getRegisteredSegments', 'registerSegments') // noop, just keeping the interface. This is used by standalone client-side API only, and so only implemented by InMemory and InLocalStorage. - usesSegments(): Promise { + usesMatcher(): Promise { return Promise.resolve(true); } diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index d516837e..1d33eb93 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -2,7 +2,6 @@ import { ISplitsCacheSync } from './types'; import { ISplit } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; import { ISet } from '../utils/lang/sets'; -import { IN_SEGMENT } from '../utils/constants'; /** * This class provides a skeletal implementation of the ISplitsCacheSync interface @@ -44,7 +43,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { abstract trafficTypeExists(trafficType: string): boolean - abstract usesSegments(): boolean + abstract usesMatcher(matcherType: string): boolean abstract clear(): void @@ -86,15 +85,15 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { /** * Given a parsed split, it returns a boolean flagging if its conditions use segments matchers (rules & whitelists). 
- * This util is intended to simplify the implementation of `splitsCache::usesSegments` method + * This util is intended to simplify the implementation of `splitsCache::usesMatcher` method */ -export function usesSegments(split: ISplit) { +export function usesMatcher(split: ISplit, matcherType: string) { const conditions = split.conditions || []; for (let i = 0; i < conditions.length; i++) { const matchers = conditions[i].matcherGroup.matchers; for (let j = 0; j < matchers.length; j++) { - if (matchers[j].matcherType === IN_SEGMENT) return true; + if (matchers[j].matcherType === matcherType) return true; } } diff --git a/src/storages/KeyBuilder.ts b/src/storages/KeyBuilder.ts index e70b251b..60ab7af8 100644 --- a/src/storages/KeyBuilder.ts +++ b/src/storages/KeyBuilder.ts @@ -51,6 +51,9 @@ export class KeyBuilder { buildSplitsWithSegmentCountKey() { return `${this.prefix}.splits.usingSegments`; } + buildSplitsWithLargeSegmentCountKey() { + return `${this.prefix}.splits.usingLargeSegments`; + } buildSegmentNameKey(segmentName: string) { return `${this.prefix}.segment.${segmentName}`; diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index ccd4859f..30f47f67 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -1,5 +1,5 @@ import { ISplit } from '../../dtos/types'; -import { AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; +import { AbstractSplitsCacheSync, usesMatcher } from '../AbstractSplitsCacheSync'; import { isFiniteNumber, toNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilderCS } from '../KeyBuilderCS'; import { ILogger } from '../../logger/types'; @@ -7,6 +7,7 @@ import { LOG_PREFIX } from './constants'; import { ISet, _Set, setToArray } from '../../utils/lang/sets'; import { ISettings } from '../../types'; import { getStorageHash } from '../KeyBuilder'; +import { IN_LARGE_SEGMENT, IN_SEGMENT } from '../../utils/constants'; /** * ISplitsCacheSync implementation that stores split definitions in browser LocalStorage. 
@@ -50,10 +51,15 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); this._decrementCount(ttKey); - if (usesSegments(split)) { + if (usesMatcher(split, IN_SEGMENT)) { const segmentsCountKey = this.keys.buildSplitsWithSegmentCountKey(); this._decrementCount(segmentsCountKey); } + + if (usesMatcher(split, IN_LARGE_SEGMENT)) { + const segmentsCountKey = this.keys.buildSplitsWithLargeSegmentCountKey(); + this._decrementCount(segmentsCountKey); + } } } catch (e) { this.log.error(LOG_PREFIX + e); @@ -67,11 +73,17 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { // @ts-expect-error localStorage.setItem(ttKey, toNumber(localStorage.getItem(ttKey)) + 1); - if (usesSegments(split)) { + if (usesMatcher(split, IN_SEGMENT)) { const segmentsCountKey = this.keys.buildSplitsWithSegmentCountKey(); // @ts-expect-error localStorage.setItem(segmentsCountKey, toNumber(localStorage.getItem(segmentsCountKey)) + 1); } + + if (usesMatcher(split, IN_LARGE_SEGMENT)) { + const segmentsCountKey = this.keys.buildSplitsWithLargeSegmentCountKey(); + // @ts-expect-error + localStorage.setItem(segmentsCountKey, toNumber(localStorage.getItem(segmentsCountKey)) + 1); + } } } catch (e) { this.log.error(LOG_PREFIX + e); @@ -203,11 +215,14 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { return isFiniteNumber(ttCount) && ttCount > 0; } - usesSegments() { + usesMatcher(matcherType: string) { // If cache hasn't been synchronized with the cloud, assume we need them. if (!this.hasSync) return true; - const storedCount = localStorage.getItem(this.keys.buildSplitsWithSegmentCountKey()); + const storedCount = localStorage.getItem(matcherType === IN_SEGMENT ? + this.keys.buildSplitsWithSegmentCountKey() : + this.keys.buildSplitsWithLargeSegmentCountKey() + ); const splitsWithSegmentsCount = storedCount === null ? 0 : toNumber(storedCount); if (isFiniteNumber(splitsWithSegmentsCount)) { diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts index 732ca8b7..bf857888 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts @@ -4,6 +4,7 @@ import { splitWithUserTT, splitWithAccountTT, splitWithAccountTTAndUsesSegments, import { ISplit } from '../../../dtos/types'; import { _Set } from '../../../utils/lang/sets'; import { fullSettings } from '../../../utils/settingsValidation/__tests__/settings.mocks'; +import { IN_SEGMENT } from '../../../utils/constants'; test('SPLIT CACHE / LocalStorage', () => { @@ -141,26 +142,26 @@ test('SPLIT CACHE / LocalStorage / killLocally', () => { }); -test('SPLIT CACHE / LocalStorage / usesSegments', () => { +test('SPLIT CACHE / LocalStorage / usesMatcher', () => { const cache = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS('SPLITIO', 'user')); - expect(cache.usesSegments()).toBe(true); // true initially, until data is synchronized + expect(cache.usesMatcher(IN_SEGMENT)).toBe(true); // true initially, until data is synchronized cache.setChangeNumber(1); // to indicate that data has been synced. 
cache.addSplits([['split1', splitWithUserTT], ['split2', splitWithAccountTT],]); - expect(cache.usesSegments()).toBe(false); // 0 splits using segments + expect(cache.usesMatcher(IN_SEGMENT)).toBe(false); // 0 splits using segments cache.addSplit('split3', splitWithAccountTTAndUsesSegments); - expect(cache.usesSegments()).toBe(true); // 1 split using segments + expect(cache.usesMatcher(IN_SEGMENT)).toBe(true); // 1 split using segments cache.addSplit('split4', splitWithAccountTTAndUsesSegments); - expect(cache.usesSegments()).toBe(true); // 2 splits using segments + expect(cache.usesMatcher(IN_SEGMENT)).toBe(true); // 2 splits using segments cache.removeSplit('split3'); - expect(cache.usesSegments()).toBe(true); // 1 split using segments + expect(cache.usesMatcher(IN_SEGMENT)).toBe(true); // 1 split using segments cache.removeSplit('split4'); - expect(cache.usesSegments()).toBe(false); // 0 splits using segments + expect(cache.usesMatcher(IN_SEGMENT)).toBe(false); // 0 splits using segments }); test('SPLIT CACHE / LocalStorage / flag set cache tests', () => { diff --git a/src/storages/inMemory/SplitsCacheInMemory.ts b/src/storages/inMemory/SplitsCacheInMemory.ts index cf570eea..b8f8c06b 100644 --- a/src/storages/inMemory/SplitsCacheInMemory.ts +++ b/src/storages/inMemory/SplitsCacheInMemory.ts @@ -1,7 +1,8 @@ import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; -import { AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; +import { AbstractSplitsCacheSync, usesMatcher } from '../AbstractSplitsCacheSync'; import { isFiniteNumber } from '../../utils/lang'; import { ISet, _Set } from '../../utils/lang/sets'; +import { IN_LARGE_SEGMENT, IN_SEGMENT } from '../../utils/constants'; /** * Default ISplitsCacheSync implementation that stores split definitions in memory. @@ -13,7 +14,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { private splitsCache: Record = {}; private ttCache: Record = {}; private changeNumber: number = -1; - private splitsWithSegmentsCount: number = 0; + private segmentsCount: number = 0; + private largeSegmentsCount: number = 0; private flagSetsCache: Record> = {}; constructor(splitFiltersValidation?: ISplitFiltersValidation) { @@ -25,7 +27,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.splitsCache = {}; this.ttCache = {}; this.changeNumber = -1; - this.splitsWithSegmentsCount = 0; + this.segmentsCount = 0; + this.largeSegmentsCount = 0; } addSplit(name: string, split: ISplit): boolean { @@ -38,9 +41,9 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.removeFromFlagSets(previousSplit.name, previousSplit.sets); - if (usesSegments(previousSplit)) { // Substract from segments count for the previous version of this Split. - this.splitsWithSegmentsCount--; - } + // Substract from segments count for the previous version of this Split. 
+ if (usesMatcher(previousSplit, IN_SEGMENT)) this.segmentsCount--; + if (usesMatcher(previousSplit, IN_LARGE_SEGMENT)) this.largeSegmentsCount--; } if (split) { @@ -52,7 +55,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.addToFlagSets(split); // Add to segments count for the new version of the Split - if (usesSegments(split)) this.splitsWithSegmentsCount++; + if (usesMatcher(split, IN_SEGMENT)) this.segmentsCount++; + if (usesMatcher(split, IN_LARGE_SEGMENT)) this.largeSegmentsCount++; return true; } else { @@ -72,7 +76,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.removeFromFlagSets(split.name, split.sets); // Update the segments count. - if (usesSegments(split)) this.splitsWithSegmentsCount--; + if (usesMatcher(split, IN_SEGMENT)) this.segmentsCount--; + if (usesMatcher(split, IN_LARGE_SEGMENT)) this.largeSegmentsCount--; return true; } else { @@ -101,8 +106,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { return isFiniteNumber(this.ttCache[trafficType]) && this.ttCache[trafficType] > 0; } - usesSegments(): boolean { - return this.getChangeNumber() === -1 || this.splitsWithSegmentsCount > 0; + usesMatcher(matcherType: string): boolean { + return this.getChangeNumber() === -1 || (matcherType === IN_SEGMENT ? this.segmentsCount > 0 : this.largeSegmentsCount > 0); } getNamesByFlagSets(flagSets: string[]): ISet[] { diff --git a/src/storages/types.ts b/src/storages/types.ts index 61602e91..10e83604 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -205,7 +205,7 @@ export interface ISplitsCacheBase { // should never reject or throw an exception. Instead return true by default, asssuming the TT might exist. trafficTypeExists(trafficType: string): MaybeThenable, // only for Client-Side - usesSegments(): MaybeThenable, + usesMatcher(matcherType: string): MaybeThenable, clear(): MaybeThenable, // should never reject or throw an exception. Instead return false by default, to avoid emitting SDK_READY_FROM_CACHE. 
checkCache(): MaybeThenable, @@ -223,7 +223,7 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { getAll(): ISplit[], getSplitNames(): string[], trafficTypeExists(trafficType: string): boolean, - usesSegments(): boolean, + usesMatcher(matcherType: string): boolean, clear(): void, checkCache(): boolean, killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean, @@ -240,7 +240,7 @@ export interface ISplitsCacheAsync extends ISplitsCacheBase { getAll(): Promise, getSplitNames(): Promise, trafficTypeExists(trafficType: string): Promise, - usesSegments(): Promise, + usesMatcher(matcherType: string): Promise, clear(): Promise, checkCache(): Promise, killLocally(name: string, defaultTreatment: string, changeNumber: number): Promise, diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index 44b9c8b3..164c93eb 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -12,7 +12,7 @@ jest.mock('../submitters/submitterManager', () => { // Mocked storageManager const storageManagerMock = { splits: { - usesSegments: () => false + usesMatcher: () => false } }; diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index ac3253f2..1a17490c 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -8,6 +8,7 @@ import { getMatching } from '../../utils/key'; import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; import { POLLING_SMART_PAUSING, POLLING_START, POLLING_STOP } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; +import { IN_SEGMENT } from '../../utils/constants'; /** * Expose start / stop mechanism for polling data from services. @@ -43,7 +44,7 @@ export function pollingManagerCSFactory( // smart pausing readiness.splits.on(SDK_SPLITS_ARRIVED, () => { if (!splitsSyncTask.isRunning()) return; // noop if not doing polling - const splitsHaveSegments = storage.splits.usesSegments(); + const splitsHaveSegments = storage.splits.usesMatcher(IN_SEGMENT); if (splitsHaveSegments !== mySegmentsSyncTask.isRunning()) { log.info(POLLING_SMART_PAUSING, [splitsHaveSegments ? 
'ON' : 'OFF']); if (splitsHaveSegments) { @@ -59,9 +60,9 @@ export function pollingManagerCSFactory( // smart ready function smartReady() { - if (!readiness.isReady() && !storage.splits.usesSegments()) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + if (!readiness.isReady() && !storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); } - if (!storage.splits.usesSegments()) setTimeout(smartReady, 0); + if (!storage.splits.usesMatcher(IN_SEGMENT)) setTimeout(smartReady, 0); else readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); mySegmentsSyncTasks[matchingKey] = mySegmentsSyncTask; @@ -77,7 +78,7 @@ export function pollingManagerCSFactory( log.info(POLLING_START); splitsSyncTask.start(); - if (storage.splits.usesSegments()) startMySegmentsSyncTasks(); + if (storage.splits.usesMatcher(IN_SEGMENT)) startMySegmentsSyncTasks(); }, // Stop periodic fetching (polling) diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 421e0c3f..ab62b1fc 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -6,6 +6,7 @@ import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; +import { IN_SEGMENT } from '../../../utils/constants'; type IMySegmentsUpdater = (segmentList?: string[], noCache?: boolean) => Promise @@ -55,7 +56,7 @@ export function mySegmentsUpdaterFactory( } // Notify update if required - if (splitsCache.usesSegments() && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { + if (splitsCache.usesMatcher(IN_SEGMENT) && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { readyOnAlreadyExistentState = false; segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED); } diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index b6407630..d399e699 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -7,7 +7,7 @@ import { IPollingManager, IPollingManagerCS } from './polling/types'; import { PUSH_SUBSYSTEM_UP, PUSH_SUBSYSTEM_DOWN } from './streaming/constants'; import { SYNC_START_POLLING, SYNC_CONTINUE_POLLING, SYNC_STOP_POLLING } from '../logger/constants'; import { isConsentGranted } from '../consent'; -import { POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; +import { IN_SEGMENT, POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; import { ISdkFactoryContextSync } from '../sdkFactory/types'; /** @@ -150,7 +150,7 @@ export function syncManagerOnlineFactory( if (pushManager) { if (pollingManager!.isRunning()) { // if doing polling, we must start the periodic fetch of data - if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); + if (storage.splits.usesMatcher(IN_SEGMENT)) mySegmentsSyncTask.start(); } else { // if not polling, we must execute the sync task for the initial fetch // of segments since `syncAll` was already executed when starting the main client @@ -158,7 +158,7 @@ export function syncManagerOnlineFactory( } pushManager.add(matchingKey, mySegmentsSyncTask); } else { - if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); + if (storage.splits.usesMatcher(IN_SEGMENT)) mySegmentsSyncTask.start(); } } else { if (!readinessManager.isReady()) mySegmentsSyncTask.execute(); diff --git a/src/utils/constants/index.ts b/src/utils/constants/index.ts index 77bda4e1..51decc18 100644 
--- a/src/utils/constants/index.ts +++ b/src/utils/constants/index.ts @@ -109,3 +109,4 @@ export const FLAG_SPEC_VERSION = '1.1'; // Matcher types export const IN_SEGMENT = 'IN_SEGMENT'; +export const IN_LARGE_SEGMENT = 'IN_LARGE_SEGMENT'; From a5f6f365955be1310255b3855181b46dad43cbd9 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 12 Jul 2024 17:47:06 -0300 Subject: [PATCH 012/146] Add myLargeSegmentsSyncTasks --- src/logger/constants.ts | 1 - src/logger/messages/info.ts | 1 - src/sync/polling/pollingManagerCS.ts | 92 +++++++++++++------ .../polling/syncTasks/mySegmentsSyncTask.ts | 17 ++-- src/sync/polling/types.ts | 5 +- .../polling/updaters/mySegmentsUpdater.ts | 14 +-- 6 files changed, 82 insertions(+), 48 deletions(-) diff --git a/src/logger/constants.ts b/src/logger/constants.ts index 36f6a139..d7adf667 100644 --- a/src/logger/constants.ts +++ b/src/logger/constants.ts @@ -36,7 +36,6 @@ export const IMPRESSION = 102; export const IMPRESSION_QUEUEING = 103; export const NEW_SHARED_CLIENT = 104; export const NEW_FACTORY = 105; -export const POLLING_SMART_PAUSING = 106; export const POLLING_START = 107; export const POLLING_STOP = 108; export const SYNC_SPLITS_FETCH_RETRY = 109; diff --git a/src/logger/messages/info.ts b/src/logger/messages/info.ts index 907c9fc7..64e0e64a 100644 --- a/src/logger/messages/info.ts +++ b/src/logger/messages/info.ts @@ -19,7 +19,6 @@ export const codesInfo: [number, string][] = codesWarn.concat([ [c.USER_CONSENT_INITIAL, 'Starting the SDK with %s user consent. No data will be sent.'], // synchronizer - [c.POLLING_SMART_PAUSING, c.LOG_PREFIX_SYNC_POLLING + 'Turning segments data polling %s.'], [c.POLLING_START, c.LOG_PREFIX_SYNC_POLLING + 'Starting polling'], [c.POLLING_STOP, c.LOG_PREFIX_SYNC_POLLING + 'Stopping polling'], [c.SYNC_SPLITS_FETCH_RETRY, c.LOG_PREFIX_SYNC_SPLITS + 'Retrying download of feature flags #%s. Reason: %s'], diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 1a17490c..34427642 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -6,9 +6,9 @@ import { mySegmentsSyncTaskFactory } from './syncTasks/mySegmentsSyncTask'; import { splitsSyncTaskFactory } from './syncTasks/splitsSyncTask'; import { getMatching } from '../../utils/key'; import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; -import { POLLING_SMART_PAUSING, POLLING_START, POLLING_STOP } from '../../logger/constants'; +import { POLLING_START, POLLING_STOP } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; -import { IN_SEGMENT } from '../../utils/constants'; +import { IN_LARGE_SEGMENT, IN_SEGMENT } from '../../utils/constants'; /** * Expose start / stop mechanism for polling data from services. @@ -25,60 +25,96 @@ export function pollingManagerCSFactory( // Map of matching keys to their corresponding MySegmentsSyncTask. 
const mySegmentsSyncTasks: Record = {}; + const myLargeSegmentsSyncTasks: Record = {}; const matchingKey = getMatching(settings.core.key); - const mySegmentsSyncTask = add(matchingKey, readiness, storage); + const { msSyncTask, mlsSyncTask } = add(matchingKey, readiness, storage); function startMySegmentsSyncTasks() { - forOwn(mySegmentsSyncTasks, function (mySegmentsSyncTask) { - mySegmentsSyncTask.start(); + const splitsHaveSegments = storage.splits.usesMatcher(IN_SEGMENT); + + forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { + if (splitsHaveSegments) mySegmentsSyncTask.start(); + else mySegmentsSyncTask.stop(); + }); + + const splitsHaveLargeSegments = storage.splits.usesMatcher(IN_LARGE_SEGMENT); + + forOwn(myLargeSegmentsSyncTasks, (myLargeSegmentsSyncTask) => { + if (myLargeSegmentsSyncTask) { + if (splitsHaveLargeSegments) myLargeSegmentsSyncTask.start(); + else myLargeSegmentsSyncTask.stop(); + } }); } function stopMySegmentsSyncTasks() { - forOwn(mySegmentsSyncTasks, function (mySegmentsSyncTask) { - if (mySegmentsSyncTask.isRunning()) mySegmentsSyncTask.stop(); - }); + forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => mySegmentsSyncTask.stop()); + forOwn(myLargeSegmentsSyncTasks, (myLargeSegmentsSyncTask) => myLargeSegmentsSyncTask && myLargeSegmentsSyncTask.stop()); } // smart pausing readiness.splits.on(SDK_SPLITS_ARRIVED, () => { - if (!splitsSyncTask.isRunning()) return; // noop if not doing polling - const splitsHaveSegments = storage.splits.usesMatcher(IN_SEGMENT); - if (splitsHaveSegments !== mySegmentsSyncTask.isRunning()) { - log.info(POLLING_SMART_PAUSING, [splitsHaveSegments ? 'ON' : 'OFF']); - if (splitsHaveSegments) { - startMySegmentsSyncTasks(); - } else { - stopMySegmentsSyncTasks(); - } - } + // smart pausing of mySegments polling + if (splitsSyncTask.isRunning()) startMySegmentsSyncTasks(); }); function add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync) { - const mySegmentsSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMySegments, storage, readiness, settings, matchingKey); + const msSyncTask = mySegmentsSyncTaskFactory( + splitApi.fetchMySegments, + storage.segments, + () => { + if (storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + }, + settings, + matchingKey, + settings.scheduler.segmentsRefreshRate + ); + + let mlsSyncTask; + if (settings.sync.largeSegmentsEnabled) { + mlsSyncTask = mySegmentsSyncTaskFactory( + splitApi.fetchMyLargeSegments, + storage.largeSegments!, + () => { + if (readiness.largeSegments && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED); + }, + settings, + matchingKey, + settings.scheduler.largeSegmentsRefreshRate + ); + } // smart ready function smartReady() { - if (!readiness.isReady() && !storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + if (!readiness.isReady()) { + if (!storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + if (readiness.largeSegments && !storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED); + } } - if (!storage.splits.usesMatcher(IN_SEGMENT)) setTimeout(smartReady, 0); + if (!storage.splits.usesMatcher(IN_SEGMENT) && !storage.splits.usesMatcher(IN_LARGE_SEGMENT)) setTimeout(smartReady, 0); else readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); - mySegmentsSyncTasks[matchingKey] = mySegmentsSyncTask; - return mySegmentsSyncTask; + mySegmentsSyncTasks[matchingKey] = 
msSyncTask; + myLargeSegmentsSyncTasks[matchingKey] = mlsSyncTask; + + return { + msSyncTask, + mlsSyncTask + }; } return { splitsSyncTask, - segmentsSyncTask: mySegmentsSyncTask, + segmentsSyncTask: msSyncTask, + largeSegmentsSyncTask: mlsSyncTask, // Start periodic fetching (polling) start() { log.info(POLLING_START); splitsSyncTask.start(); - if (storage.splits.usesMatcher(IN_SEGMENT)) startMySegmentsSyncTasks(); + startMySegmentsSyncTasks(); }, // Stop periodic fetching (polling) @@ -106,10 +142,14 @@ export function pollingManagerCSFactory( remove(matchingKey: string) { delete mySegmentsSyncTasks[matchingKey]; + delete myLargeSegmentsSyncTasks[matchingKey]; }, get(matchingKey: string) { - return mySegmentsSyncTasks[matchingKey]; + return { + msSyncTask: mySegmentsSyncTasks[matchingKey], + mlsSyncTask: myLargeSegmentsSyncTasks[matchingKey] + }; } }; diff --git a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts index f8ebade2..f9c54ff0 100644 --- a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +++ b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts @@ -1,5 +1,4 @@ -import { IStorageSync } from '../../../storages/types'; -import { IReadinessManager } from '../../../readiness/types'; +import { ISegmentsCacheSync } from '../../../storages/types'; import { syncTaskFactory } from '../../syncTask'; import { IMySegmentsSyncTask } from '../types'; import { IFetchMySegments } from '../../../services/types'; @@ -12,24 +11,24 @@ import { mySegmentsUpdaterFactory } from '../updaters/mySegmentsUpdater'; */ export function mySegmentsSyncTaskFactory( fetchMySegments: IFetchMySegments, - storage: IStorageSync, - readiness: IReadinessManager, + mySegmentsCache: ISegmentsCacheSync, + notifyUpdate: () => void, settings: ISettings, - matchingKey: string + matchingKey: string, + segmentsRefreshRate: number ): IMySegmentsSyncTask { return syncTaskFactory( settings.log, mySegmentsUpdaterFactory( settings.log, mySegmentsFetcherFactory(fetchMySegments), - storage.splits, - storage.segments, - readiness.segments, + mySegmentsCache, + notifyUpdate, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, matchingKey ), - settings.scheduler.segmentsRefreshRate, + segmentsRefreshRate, 'mySegmentsUpdater', ); } diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 4653b568..d7187c8e 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -20,13 +20,14 @@ export interface IPollingManager extends ITask { syncAll(): Promise splitsSyncTask: ISplitsSyncTask segmentsSyncTask: ISyncTask + largeSegmentsSyncTask?: ISyncTask } /** * PollingManager for client-side with support for multiple clients */ export interface IPollingManagerCS extends IPollingManager { - add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync): IMySegmentsSyncTask + add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync): { msSyncTask: IMySegmentsSyncTask, mlsSyncTask?: IMySegmentsSyncTask } remove(matchingKey: string): void; - get(matchingKey: string): IMySegmentsSyncTask | undefined + get(matchingKey: string): { msSyncTask: IMySegmentsSyncTask, mlsSyncTask?: IMySegmentsSyncTask } | undefined } diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index ab62b1fc..a914c676 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -1,14 +1,11 @@ import { IMySegmentsFetcher } from 
'../fetchers/types'; -import { ISegmentsCacheSync, ISplitsCacheSync } from '../../../storages/types'; -import { ISegmentsEventEmitter } from '../../../readiness/types'; +import { ISegmentsCacheSync } from '../../../storages/types'; import { timeout } from '../../../utils/promise/timeout'; -import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; -import { IN_SEGMENT } from '../../../utils/constants'; -type IMySegmentsUpdater = (segmentList?: string[], noCache?: boolean) => Promise +type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => Promise /** * factory of MySegments updater, a task that: @@ -19,9 +16,8 @@ type IMySegmentsUpdater = (segmentList?: string[], noCache?: boolean) => Promise export function mySegmentsUpdaterFactory( log: ILogger, mySegmentsFetcher: IMySegmentsFetcher, - splitsCache: ISplitsCacheSync, mySegmentsCache: ISegmentsCacheSync, - segmentsEventEmitter: ISegmentsEventEmitter, + notifyUpdate: () => void, requestTimeoutBeforeReady: number, retriesOnFailureBeforeReady: number, matchingKey: string @@ -56,9 +52,9 @@ export function mySegmentsUpdaterFactory( } // Notify update if required - if (splitsCache.usesMatcher(IN_SEGMENT) && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { + if (shouldNotifyUpdate || readyOnAlreadyExistentState) { readyOnAlreadyExistentState = false; - segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED); + notifyUpdate(); } } From e8969bc4878f87ffa2d8bbc9fba4d77d4de4de36 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 15 Jul 2024 16:37:46 -0300 Subject: [PATCH 013/146] Refactor --- src/sync/polling/pollingManagerCS.ts | 42 ++++++++++++---------------- 1 file changed, 18 insertions(+), 24 deletions(-) diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 34427642..890a2dd0 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -23,34 +23,32 @@ export function pollingManagerCSFactory( const splitsSyncTask = splitsSyncTaskFactory(splitApi.fetchSplitChanges, storage, readiness, settings, true); - // Map of matching keys to their corresponding MySegmentsSyncTask. - const mySegmentsSyncTasks: Record = {}; - const myLargeSegmentsSyncTasks: Record = {}; + // Map of matching keys to their corresponding MySegmentsSyncTask for segments and large segments. 
+ const mySegmentsSyncTasks: Record = {}; const matchingKey = getMatching(settings.core.key); const { msSyncTask, mlsSyncTask } = add(matchingKey, readiness, storage); function startMySegmentsSyncTasks() { const splitsHaveSegments = storage.splits.usesMatcher(IN_SEGMENT); - - forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { - if (splitsHaveSegments) mySegmentsSyncTask.start(); - else mySegmentsSyncTask.stop(); - }); - const splitsHaveLargeSegments = storage.splits.usesMatcher(IN_LARGE_SEGMENT); - forOwn(myLargeSegmentsSyncTasks, (myLargeSegmentsSyncTask) => { - if (myLargeSegmentsSyncTask) { - if (splitsHaveLargeSegments) myLargeSegmentsSyncTask.start(); - else myLargeSegmentsSyncTask.stop(); + forOwn(mySegmentsSyncTasks, ({ msSyncTask, mlsSyncTask }) => { + if (splitsHaveSegments) msSyncTask.start(); + else msSyncTask.stop(); + + if (mlsSyncTask) { + if (splitsHaveLargeSegments) mlsSyncTask.start(); + else mlsSyncTask.stop(); } }); } function stopMySegmentsSyncTasks() { - forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => mySegmentsSyncTask.stop()); - forOwn(myLargeSegmentsSyncTasks, (myLargeSegmentsSyncTask) => myLargeSegmentsSyncTask && myLargeSegmentsSyncTask.stop()); + forOwn(mySegmentsSyncTasks, ({ msSyncTask, mlsSyncTask }) => { + msSyncTask.stop(); + mlsSyncTask && mlsSyncTask.stop(); + }); } // smart pausing @@ -95,8 +93,7 @@ export function pollingManagerCSFactory( if (!storage.splits.usesMatcher(IN_SEGMENT) && !storage.splits.usesMatcher(IN_LARGE_SEGMENT)) setTimeout(smartReady, 0); else readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); - mySegmentsSyncTasks[matchingKey] = msSyncTask; - myLargeSegmentsSyncTasks[matchingKey] = mlsSyncTask; + mySegmentsSyncTasks[matchingKey] = { msSyncTask: msSyncTask, mlsSyncTask: mlsSyncTask }; return { msSyncTask, @@ -131,8 +128,9 @@ export function pollingManagerCSFactory( // fetch splits and segments syncAll() { const promises = [splitsSyncTask.execute()]; - forOwn(mySegmentsSyncTasks, function (mySegmentsSyncTask) { - promises.push(mySegmentsSyncTask.execute()); + forOwn(mySegmentsSyncTasks, function ({ msSyncTask, mlsSyncTask }) { + promises.push(msSyncTask.execute()); + mlsSyncTask && promises.push(mlsSyncTask.execute()); }); return Promise.all(promises); }, @@ -142,14 +140,10 @@ export function pollingManagerCSFactory( remove(matchingKey: string) { delete mySegmentsSyncTasks[matchingKey]; - delete myLargeSegmentsSyncTasks[matchingKey]; }, get(matchingKey: string) { - return { - msSyncTask: mySegmentsSyncTasks[matchingKey], - mlsSyncTask: myLargeSegmentsSyncTasks[matchingKey] - }; + return mySegmentsSyncTasks[matchingKey]; } }; From e42e09fe123cdf6b0b40ba9323e8b954304ec397 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 15 Jul 2024 17:13:56 -0300 Subject: [PATCH 014/146] Remove redundant comment --- src/services/splitApi.ts | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index 8d5b7b79..51f1a02a 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -79,12 +79,6 @@ export function splitApiFactory( }, fetchMyLargeSegments(userMatchingKey: string, noCache?: boolean) { - /** - * URI encoding of user keys in order to: - * - avoid 400 responses (due to URI malformed). E.g.: '/api/mySegments/%' - * - avoid 404 responses. E.g.: '/api/mySegments/foo/bar' - * - match user keys with special characters. E.g.: 'foo%bar', 'foo/bar' - */ const url = `${urls.sdk}/myLargeSegments/${encodeURIComponent(userMatchingKey)}`; return splitHttpClient(url, noCache ? 
noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MY_LARGE_SEGMENT)); }, From 274c9d55b439ca9b466f621218f2a006cf7ea659 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 15 Jul 2024 17:34:38 -0300 Subject: [PATCH 015/146] Handle MY_LARGE_SEGMENTS_UPDATE streaming notifications --- src/sync/streaming/SSEHandler/index.ts | 3 +- src/sync/streaming/SSEHandler/types.ts | 16 +- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 6 +- src/sync/streaming/constants.ts | 1 + src/sync/streaming/pushManager.ts | 153 ++++++++++-------- src/sync/streaming/types.ts | 8 +- src/sync/submitters/types.ts | 5 +- src/sync/syncManagerOnline.ts | 30 ++-- 8 files changed, 137 insertions(+), 85 deletions(-) diff --git a/src/sync/streaming/SSEHandler/index.ts b/src/sync/streaming/SSEHandler/index.ts index d8f20b32..43da3e14 100644 --- a/src/sync/streaming/SSEHandler/index.ts +++ b/src/sync/streaming/SSEHandler/index.ts @@ -1,6 +1,6 @@ import { errorParser, messageParser } from './NotificationParser'; import { notificationKeeperFactory } from './NotificationKeeper'; -import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE } from '../constants'; +import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_LARGE_SEGMENTS_UPDATE } from '../constants'; import { IPushEventEmitter } from '../types'; import { ISseEventHandler } from '../SSEClient/types'; import { INotificationError, INotificationMessage } from './types'; @@ -83,6 +83,7 @@ export function SSEHandlerFactory(log: ILogger, pushEmitter: IPushEventEmitter, case SPLIT_UPDATE: case SEGMENT_UPDATE: case MY_SEGMENTS_UPDATE_V2: + case MY_LARGE_SEGMENTS_UPDATE: case SPLIT_KILL: pushEmitter.emit(parsedData.type, parsedData); break; diff --git a/src/sync/streaming/SSEHandler/types.ts b/src/sync/streaming/SSEHandler/types.ts index d794322c..dc1b4257 100644 --- a/src/sync/streaming/SSEHandler/types.ts +++ b/src/sync/streaming/SSEHandler/types.ts @@ -1,5 +1,5 @@ import { ControlType } from '../constants'; -import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY } from '../types'; +import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MY_LARGE_SEGMENTS_UPDATE } from '../types'; export interface IMySegmentsUpdateData { type: MY_SEGMENTS_UPDATE, @@ -35,6 +35,18 @@ export interface IMySegmentsUpdateV2Data { u: UpdateStrategy, } +export interface IMyLargeSegmentsUpdateData { + type: MY_LARGE_SEGMENTS_UPDATE, + changeNumber: number, + largeSegment: string, + c: Compression, + d: string, + u: UpdateStrategy, + i?: number, // time interval in millis + h?: number, // hash function. 
0 for murmur3_32, 1 for murmur3_64 + s?: number, // seed for hash function +} + export interface ISegmentUpdateData { type: SEGMENT_UPDATE, changeNumber: number, @@ -68,6 +80,6 @@ export interface IOccupancyData { } } -export type INotificationData = IMySegmentsUpdateData | IMySegmentsUpdateV2Data | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData +export type INotificationData = IMySegmentsUpdateData | IMySegmentsUpdateV2Data | IMyLargeSegmentsUpdateData | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData export type INotificationMessage = { parsedData: INotificationData, channel: string, timestamp: number, data: string } export type INotificationError = Event & { parsedData?: any, message?: string } diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index eb1a25b1..c91d8bbe 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -1,13 +1,13 @@ import { IMySegmentsSyncTask, MySegmentsData } from '../../polling/types'; import { Backoff } from '../../../utils/Backoff'; import { IUpdateWorker } from './types'; -import { MY_SEGMENT } from '../../../utils/constants'; import { ITelemetryTracker } from '../../../trackers/types'; +import { UpdatesFromSSEEnum } from '../../submitters/types'; /** * MySegmentsUpdateWorker factory */ -export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker { +export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker { let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events let currentChangeNumber = -1; @@ -26,7 +26,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, mySegmentsSyncTask.execute(_segmentsData, true).then((result) => { if (!isHandlingEvent) return; // halt if `stop` has been called if (result !== false) {// Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. - if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MY_SEGMENT); + if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType); currentChangeNumber = Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. 
} if (handleNewEvent) { diff --git a/src/sync/streaming/constants.ts b/src/sync/streaming/constants.ts index 8afb41a6..c6a338c9 100644 --- a/src/sync/streaming/constants.ts +++ b/src/sync/streaming/constants.ts @@ -30,6 +30,7 @@ export const MY_SEGMENTS_UPDATE_V2 = 'MY_SEGMENTS_UPDATE_V2'; export const SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export const SPLIT_KILL = 'SPLIT_KILL'; export const SPLIT_UPDATE = 'SPLIT_UPDATE'; +export const MY_LARGE_SEGMENTS_UPDATE = 'MY_LARGE_SEGMENTS_UPDATE'; // Control-type push notifications, handled by NotificationKeeper export const CONTROL = 'CONTROL'; diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index db8d4dbe..21ed7d0d 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -11,14 +11,14 @@ import { authenticateFactory, hashUserKey } from './AuthClient'; import { forOwn } from '../../utils/lang'; import { SSEClient } from './SSEClient'; import { getMatching } from '../../utils/key'; -import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType } from './constants'; +import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType, MY_LARGE_SEGMENTS_UPDATE } from './constants'; import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; -import { KeyList, UpdateStrategy } from './SSEHandler/types'; +import { IMyLargeSegmentsUpdateData, IMySegmentsUpdateV2Data, KeyList, UpdateStrategy } from './SSEHandler/types'; import { isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; import { ISet, _Set } from '../../utils/lang/sets'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; -import { TOKEN_REFRESH, AUTH_REJECTION } from '../../utils/constants'; +import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; import { IUpdateWorker } from './UpdateWorkers/types'; @@ -64,7 +64,7 @@ export function pushManagerFactory( const userKeyHashes: Record = {}; // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers. // Hash64 is used to process MY_SEGMENTS_UPDATE_V2 events and dispatch actions to the corresponding MySegmentsUpdateWorker. - const clients: Record = {}; + const clients: Record = {}; // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming. 
let connectForNewClient = false; @@ -171,7 +171,10 @@ export function pushManagerFactory( // cancel scheduled fetch retries of Splits, Segments, and MySegments Update Workers function stopWorkers() { splitsUpdateWorker.stop(); - if (userKey) forOwn(clients, ({ worker }) => worker.stop()); + if (userKey) forOwn(clients, ({ worker, workerLarge }) => { + worker.stop(); + workerLarge && workerLarge.stop(); + }); else segmentsUpdateWorker!.stop(); } @@ -236,76 +239,96 @@ export function pushManagerFactory( splitsUpdateWorker.put(parsedData); }); - if (userKey) { - pushEmitter.on(MY_SEGMENTS_UPDATE, function handleMySegmentsUpdate(parsedData, channel) { - const userKeyHash = channel.split('_')[2]; - const userKey = userKeyHashes[userKeyHash]; - if (userKey && clients[userKey]) { // check existence since it can be undefined if client has been destroyed - clients[userKey].worker.put( - parsedData.changeNumber, - parsedData.includesPayload ? parsedData.segmentList ? parsedData.segmentList : [] : undefined); - } - }); - pushEmitter.on(MY_SEGMENTS_UPDATE_V2, function handleMySegmentsUpdate(parsedData) { - switch (parsedData.u) { - case UpdateStrategy.BoundedFetchRequest: { - let bitmap: Uint8Array; - try { - bitmap = parseBitmap(parsedData.d, parsedData.c); - } catch (e) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['BoundedFetchRequest', e]); - break; - } - - forOwn(clients, ({ hash64, worker }) => { - if (isInBitmap(bitmap, hash64.hex)) { - worker.put(parsedData.changeNumber); // fetch mySegments - } - }); - return; + function handleMySegmentsUpdate(parsedData: IMySegmentsUpdateV2Data | IMyLargeSegmentsUpdateData) { + const isLS = parsedData.type === MY_LARGE_SEGMENTS_UPDATE; + + switch (parsedData.u) { + case UpdateStrategy.BoundedFetchRequest: { + let bitmap: Uint8Array; + try { + bitmap = parseBitmap(parsedData.d, parsedData.c); + } catch (e) { + log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['BoundedFetchRequest', e]); + break; } - case UpdateStrategy.KeyList: { - let keyList: KeyList, added: ISet, removed: ISet; - try { - keyList = parseKeyList(parsedData.d, parsedData.c); - added = new _Set(keyList.a); - removed = new _Set(keyList.r); - } catch (e) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['KeyList', e]); - break; + + forOwn(clients, ({ hash64, worker, workerLarge }) => { + if (isInBitmap(bitmap, hash64.hex)) { + isLS ? + workerLarge && workerLarge.put(parsedData.changeNumber) : + worker.put(parsedData.changeNumber); } + }); + return; + } + case UpdateStrategy.KeyList: { + let keyList: KeyList, added: ISet, removed: ISet; + try { + keyList = parseKeyList(parsedData.d, parsedData.c); + added = new _Set(keyList.a); + removed = new _Set(keyList.r); + } catch (e) { + log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['KeyList', e]); + break; + } - forOwn(clients, ({ hash64, worker }) => { - const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; - if (add !== undefined) { + forOwn(clients, ({ hash64, worker, workerLarge }) => { + const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; + if (add !== undefined) { + isLS ? 
+ workerLarge && workerLarge.put(parsedData.changeNumber, { + name: parsedData.largeSegment, + add + }) : worker.put(parsedData.changeNumber, { name: parsedData.segmentName, add }); - } - }); - return; - } - case UpdateStrategy.SegmentRemoval: - if (!parsedData.segmentName) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['SegmentRemoval', 'No segment name was provided']); - break; } + }); + return; + } + case UpdateStrategy.SegmentRemoval: + if (!(parsedData as IMySegmentsUpdateV2Data).segmentName && !(parsedData as IMyLargeSegmentsUpdateData).largeSegment) { + log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['SegmentRemoval', 'No segment name was provided']); + break; + } - forOwn(clients, ({ worker }) => + forOwn(clients, ({ worker, workerLarge }) => { + isLS ? + workerLarge && workerLarge.put(parsedData.changeNumber, { + name: parsedData.largeSegment, + add: false + }) : worker.put(parsedData.changeNumber, { name: parsedData.segmentName, add: false - }) - ); - return; - } + }); + }); + return; + } - // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases - forOwn(clients, ({ worker }) => { + // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases + forOwn(clients, ({ worker, workerLarge }) => { + isLS ? + workerLarge && workerLarge.put(parsedData.changeNumber) : worker.put(parsedData.changeNumber); - }); }); + } + + if (userKey) { + pushEmitter.on(MY_SEGMENTS_UPDATE, function handleMySegmentsUpdate(parsedData, channel) { + const userKeyHash = channel.split('_')[2]; + const userKey = userKeyHashes[userKeyHash]; + if (userKey && clients[userKey]) { // check existence since it can be undefined if client has been destroyed + clients[userKey].worker.put( + parsedData.changeNumber, + parsedData.includesPayload ? parsedData.segmentList ? parsedData.segmentList : [] : undefined); + } + }); + + pushEmitter.on(MY_SEGMENTS_UPDATE_V2, handleMySegmentsUpdate); + pushEmitter.on(MY_LARGE_SEGMENTS_UPDATE, handleMySegmentsUpdate); } else { pushEmitter.on(SEGMENT_UPDATE, segmentsUpdateWorker!.put); } @@ -328,7 +351,7 @@ export function pushManagerFactory( if (disabled || disconnected === false) return; disconnected = false; - if (userKey) this.add(userKey, pollingManager.segmentsSyncTask as IMySegmentsSyncTask); // client-side + if (userKey) this.add(userKey, pollingManager.segmentsSyncTask, pollingManager.largeSegmentsSyncTask!); // client-side else setTimeout(connectPush); // server-side runs in next cycle as in client-side, for consistency with client-side }, @@ -338,12 +361,16 @@ export function pushManagerFactory( }, // [Only for client-side] - add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask) { + add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask, myLargeSegmentsSyncTask?: IMySegmentsSyncTask) { const hash = hashUserKey(userKey); if (!userKeyHashes[hash]) { userKeyHashes[hash] = userKey; - clients[userKey] = { hash64: hash64(userKey), worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker) }; + clients[userKey] = { + hash64: hash64(userKey), + worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, MY_SEGMENT), + workerLarge: myLargeSegmentsSyncTask ? MySegmentsUpdateWorker(myLargeSegmentsSyncTask, telemetryTracker, MY_LARGE_SEGMENT) : undefined + }; connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key // Reconnects in case of a new client. 
diff --git a/src/sync/streaming/types.ts b/src/sync/streaming/types.ts index 715220f1..607ab67d 100644 --- a/src/sync/streaming/types.ts +++ b/src/sync/streaming/types.ts @@ -1,4 +1,4 @@ -import { IMySegmentsUpdateData, IMySegmentsUpdateV2Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData } from './SSEHandler/types'; +import { IMySegmentsUpdateData, IMySegmentsUpdateV2Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData, IMyLargeSegmentsUpdateData } from './SSEHandler/types'; import { ITask } from '../types'; import { IMySegmentsSyncTask } from '../polling/types'; import { IEventEmitter } from '../../types'; @@ -16,16 +16,18 @@ export type MY_SEGMENTS_UPDATE_V2 = 'MY_SEGMENTS_UPDATE_V2'; export type SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export type SPLIT_KILL = 'SPLIT_KILL'; export type SPLIT_UPDATE = 'SPLIT_UPDATE'; +export type MY_LARGE_SEGMENTS_UPDATE = 'MY_LARGE_SEGMENTS_UPDATE'; // Control-type push notifications, handled by NotificationKeeper export type CONTROL = 'CONTROL'; export type OCCUPANCY = 'OCCUPANCY'; -export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MY_SEGMENTS_UPDATE | MY_SEGMENTS_UPDATE_V2 | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | ControlType.STREAMING_RESET +export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MY_SEGMENTS_UPDATE | MY_SEGMENTS_UPDATE_V2 | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | MY_LARGE_SEGMENTS_UPDATE | ControlType.STREAMING_RESET type IParsedData = T extends MY_SEGMENTS_UPDATE ? IMySegmentsUpdateData : T extends MY_SEGMENTS_UPDATE_V2 ? IMySegmentsUpdateV2Data : + T extends MY_LARGE_SEGMENTS_UPDATE ? IMyLargeSegmentsUpdateData : T extends SEGMENT_UPDATE ? ISegmentUpdateData : T extends SPLIT_UPDATE ? ISplitUpdateData : T extends SPLIT_KILL ? 
ISplitKillData : undefined; @@ -45,6 +47,6 @@ export interface IPushEventEmitter extends IEventEmitter { */ export interface IPushManager extends ITask, IPushEventEmitter { // Methods used in client-side, to support multiple clients - add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask): void, + add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask, myLargeSegmentsSyncTask?: IMySegmentsSyncTask): void, remove(userKey: string): void } diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index 210cf603..ca6317ad 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -103,7 +103,7 @@ export type DROPPED = 1; export type DEDUPED = 2; export type ImpressionDataType = QUEUED | DROPPED | DEDUPED export type EventDataType = QUEUED | DROPPED; -export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT; +export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT | MY_LARGE_SEGMENT; export type SPLITS = 'sp'; export type IMPRESSIONS = 'im'; @@ -161,6 +161,7 @@ export type TelemetryUsageStats = { export type UpdatesFromSSE = { sp: number, // splits ms?: number, // my segments + mls?: number // my large segments } // 'metrics/usage' JSON request body @@ -181,7 +182,7 @@ export type TelemetryUsageStatsPayload = TelemetryUsageStats & { eD: number, // eventsDropped sE: Array, // streamingEvents t?: Array, // tags - ufs?: UpdatesFromSSE, //UpdatesFromSSE + ufs?: UpdatesFromSSE, // instant updates } /** diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index d399e699..5a2d9f50 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -7,7 +7,7 @@ import { IPollingManager, IPollingManagerCS } from './polling/types'; import { PUSH_SUBSYSTEM_UP, PUSH_SUBSYSTEM_DOWN } from './streaming/constants'; import { SYNC_START_POLLING, SYNC_CONTINUE_POLLING, SYNC_STOP_POLLING } from '../logger/constants'; import { isConsentGranted } from '../consent'; -import { IN_SEGMENT, POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; +import { IN_LARGE_SEGMENT, IN_SEGMENT, POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; import { ISdkFactoryContextSync } from '../sdkFactory/types'; /** @@ -141,36 +141,44 @@ export function syncManagerOnlineFactory( shared(matchingKey: string, readinessManager: IReadinessManager, storage: IStorageSync) { if (!pollingManager) return; - const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).add(matchingKey, readinessManager, storage); + const { msSyncTask, mlsSyncTask } = (pollingManager as IPollingManagerCS).add(matchingKey, readinessManager, storage); return { - isRunning: mySegmentsSyncTask.isRunning, + isRunning: msSyncTask.isRunning, start() { if (syncEnabled) { if (pushManager) { if (pollingManager!.isRunning()) { // if doing polling, we must start the periodic fetch of data - if (storage.splits.usesMatcher(IN_SEGMENT)) mySegmentsSyncTask.start(); + if (storage.splits.usesMatcher(IN_SEGMENT)) msSyncTask.start(); + if (mlsSyncTask && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) mlsSyncTask.start(); } else { // if not polling, we must execute the sync task for the initial fetch // of segments since `syncAll` was already executed when starting the main client - mySegmentsSyncTask.execute(); + msSyncTask.execute(); + mlsSyncTask && mlsSyncTask.execute(); } - pushManager.add(matchingKey, mySegmentsSyncTask); + pushManager.add(matchingKey, msSyncTask, mlsSyncTask); } else { - if (storage.splits.usesMatcher(IN_SEGMENT)) mySegmentsSyncTask.start(); + if 
(storage.splits.usesMatcher(IN_SEGMENT)) msSyncTask.start(); + if (mlsSyncTask && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) mlsSyncTask.start(); } } else { - if (!readinessManager.isReady()) mySegmentsSyncTask.execute(); + if (!readinessManager.isReady()) { + msSyncTask.execute(); + mlsSyncTask && mlsSyncTask.execute(); + } } }, stop() { // check in case `client.destroy()` has been invoked more than once for the same client - const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).get(matchingKey); - if (mySegmentsSyncTask) { + const syncTasks = (pollingManager as IPollingManagerCS).get(matchingKey); + if (syncTasks) { + const { msSyncTask, mlsSyncTask } = syncTasks; // stop syncing if (pushManager) pushManager.remove(matchingKey); - if (mySegmentsSyncTask.isRunning()) mySegmentsSyncTask.stop(); + if (msSyncTask.isRunning()) msSyncTask.stop(); + if (mlsSyncTask && mlsSyncTask.isRunning()) mlsSyncTask.stop(); (pollingManager as IPollingManagerCS).remove(matchingKey); } From 6bb6b57c87845df093230d3c21e623085d19f09f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 15 Jul 2024 17:39:03 -0300 Subject: [PATCH 016/146] Update TelemetryCache to handle new notification updates --- src/storages/inMemory/TelemetryCacheInMemory.ts | 14 ++++---------- .../__tests__/TelemetryCacheInMemory.spec.ts | 12 ++++++------ .../__tests__/telemetrySubmitter.spec.ts | 2 +- src/sync/submitters/types.ts | 2 +- 4 files changed, 12 insertions(+), 18 deletions(-) diff --git a/src/storages/inMemory/TelemetryCacheInMemory.ts b/src/storages/inMemory/TelemetryCacheInMemory.ts index 26fb2b17..3e3979b6 100644 --- a/src/storages/inMemory/TelemetryCacheInMemory.ts +++ b/src/storages/inMemory/TelemetryCacheInMemory.ts @@ -1,4 +1,4 @@ -import { ImpressionDataType, EventDataType, LastSync, HttpErrors, HttpLatencies, StreamingEvent, Method, OperationType, MethodExceptions, MethodLatencies, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../../sync/submitters/types'; +import { ImpressionDataType, EventDataType, LastSync, HttpErrors, HttpLatencies, StreamingEvent, Method, OperationType, MethodExceptions, MethodLatencies, TelemetryUsageStatsPayload, UpdatesFromSSEEnum, UpdatesFromSSE } from '../../sync/submitters/types'; import { DEDUPED, DROPPED, LOCALHOST_MODE, QUEUED } from '../../utils/constants'; import { findLatencyIndex } from '../findLatencyIndex'; import { ISegmentsCacheSync, ISplitsCacheSync, IStorageFactoryParams, ITelemetryCacheSync } from '../types'; @@ -245,22 +245,16 @@ export class TelemetryCacheInMemory implements ITelemetryCacheSync { this.e = false; } - private updatesFromSSE = { - sp: 0, - ms: 0 - }; + private updatesFromSSE: UpdatesFromSSE = {}; popUpdatesFromSSE() { const result = this.updatesFromSSE; - this.updatesFromSSE = { - sp: 0, - ms: 0, - }; + this.updatesFromSSE = {}; return result; } recordUpdatesFromSSE(type: UpdatesFromSSEEnum) { - this.updatesFromSSE[type]++; + this.updatesFromSSE[type] = (this.updatesFromSSE[type] || 0) + 1; this.e = false; } diff --git a/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts index 8bed17b7..4ed3eb9c 100644 --- a/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts @@ -211,7 +211,7 @@ describe('TELEMETRY CACHE', () => { test('"isEmpty" and "pop" methods', () => { const cache = new TelemetryCacheInMemory(); const expectedEmptyPayload = { - lS: {}, mL: {}, mE: {}, hE: {}, hL: {}, tR: 0, 
aR: 0, iQ: 0, iDe: 0, iDr: 0, spC: undefined, seC: undefined, skC: undefined, eQ: 0, eD: 0, sE: [], t: [], ufs:{ sp: 0, ms: 0 } + lS: {}, mL: {}, mE: {}, hE: {}, hL: {}, tR: 0, aR: 0, iQ: 0, iDe: 0, iDr: 0, spC: undefined, seC: undefined, skC: undefined, eQ: 0, eD: 0, sE: [], t: [], ufs: {} }; // Initially, the cache is empty @@ -228,20 +228,20 @@ describe('TELEMETRY CACHE', () => { }); test('updates from SSE', () => { - expect(cache.popUpdatesFromSSE()).toEqual({sp: 0, ms: 0}); + expect(cache.popUpdatesFromSSE()).toEqual({}); cache.recordUpdatesFromSSE(SPLITS); cache.recordUpdatesFromSSE(SPLITS); cache.recordUpdatesFromSSE(SPLITS); cache.recordUpdatesFromSSE(MY_SEGMENT); cache.recordUpdatesFromSSE(MY_SEGMENT); - expect(cache.popUpdatesFromSSE()).toEqual({sp: 3, ms: 2}); - expect(cache.popUpdatesFromSSE()).toEqual({sp: 0, ms: 0}); + expect(cache.popUpdatesFromSSE()).toEqual({ sp: 3, ms: 2 }); + expect(cache.popUpdatesFromSSE()).toEqual({}); cache.recordUpdatesFromSSE(SPLITS); cache.recordUpdatesFromSSE(MY_SEGMENT); cache.recordUpdatesFromSSE(SPLITS); cache.recordUpdatesFromSSE(MY_SEGMENT); - expect(cache.popUpdatesFromSSE()).toEqual({sp: 2, ms: 2}); - expect(cache.popUpdatesFromSSE()).toEqual({sp: 0, ms: 0}); + expect(cache.popUpdatesFromSSE()).toEqual({ sp: 2, ms: 2 }); + expect(cache.popUpdatesFromSSE()).toEqual({}); }); }); diff --git a/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts b/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts index 000dc0a7..ed11ffbb 100644 --- a/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts +++ b/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts @@ -48,7 +48,7 @@ describe('Telemetry submitter', () => { expect(isEmptySpy).toBeCalledTimes(1); expect(popSpy).toBeCalledTimes(1); expect(postMetricsUsage).toBeCalledWith(JSON.stringify({ - lS: {}, mL: {}, mE: {}, hE: {}, hL: {}, tR: 0, aR: 0, iQ: 0, iDe: 0, iDr: 0, spC: 0, seC: 0, skC: 0, eQ: 0, eD: 0, sE: [], t: ['tag1'], ufs:{ sp: 0, ms: 0 } + lS: {}, mL: {}, mE: {}, hE: {}, hL: {}, tR: 0, aR: 0, iQ: 0, iDe: 0, iDr: 0, spC: 0, seC: 0, skC: 0, eQ: 0, eD: 0, sE: [], t: ['tag1'], ufs: {} })); // Await second periodic execution diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index ca6317ad..802c5093 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -159,7 +159,7 @@ export type TelemetryUsageStats = { // amount of instant updates that we are doing by avoiding fetching to Split servers export type UpdatesFromSSE = { - sp: number, // splits + sp?: number, // splits ms?: number, // my segments mls?: number // my large segments } From af4d9f1962bd9ba5c6e5d9a3dcdb5a89db07e5a1 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 15 Jul 2024 17:40:19 -0300 Subject: [PATCH 017/146] Fix test --- src/sync/__tests__/syncManagerOnline.spec.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index 164c93eb..bb0bd72b 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -29,7 +29,9 @@ const pollingManagerMock = { start: jest.fn(), stop: jest.fn(), isRunning: jest.fn(), - add: jest.fn(()=>{return {isrunning: () => true};}), + add: jest.fn(() => ({ + msSyncTask: { isRunning: () => true } + })), get: jest.fn() }; From 8f7caabba572baa12f6f1535ec86670a74354647 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 15 Jul 2024 17:49:51 -0300 Subject: [PATCH 018/146] Add new 
fields to telemetry payloads --- src/storages/inMemory/TelemetryCacheInMemory.ts | 4 +++- src/sync/submitters/telemetrySubmitter.ts | 2 ++ src/sync/submitters/types.ts | 4 ++++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/storages/inMemory/TelemetryCacheInMemory.ts b/src/storages/inMemory/TelemetryCacheInMemory.ts index 3e3979b6..ddb57086 100644 --- a/src/storages/inMemory/TelemetryCacheInMemory.ts +++ b/src/storages/inMemory/TelemetryCacheInMemory.ts @@ -25,7 +25,7 @@ export function shouldRecordTelemetry({ settings }: IStorageFactoryParams) { export class TelemetryCacheInMemory implements ITelemetryCacheSync { - constructor(private splits?: ISplitsCacheSync, private segments?: ISegmentsCacheSync) { } + constructor(private splits?: ISplitsCacheSync, private segments?: ISegmentsCacheSync, private largeSegments?: ISegmentsCacheSync) { } // isEmpty flag private e = true; @@ -51,6 +51,8 @@ export class TelemetryCacheInMemory implements ITelemetryCacheSync { spC: this.splits && this.splits.getSplitNames().length, seC: this.segments && this.segments.getRegisteredSegments().length, skC: this.segments && this.segments.getKeysCount(), + lseC: this.largeSegments && this.largeSegments.getRegisteredSegments().length, + lskC: this.largeSegments && this.largeSegments.getKeysCount(), sL: this.getSessionLength(), eQ: this.getEventStats(QUEUED), eD: this.getEventStats(DROPPED), diff --git a/src/sync/submitters/telemetrySubmitter.ts b/src/sync/submitters/telemetrySubmitter.ts index a2289e08..5b5d09bb 100644 --- a/src/sync/submitters/telemetrySubmitter.ts +++ b/src/sync/submitters/telemetrySubmitter.ts @@ -76,10 +76,12 @@ export function telemetryCacheConfigAdapter(telemetry: ITelemetryCacheSync, sett return objectAssign(getTelemetryConfigStats(settings.mode, settings.storage.type), { sE: settings.streamingEnabled, + lE: isClientSide ? settings.sync.largeSegmentsEnabled : undefined, rR: { sp: scheduler.featuresRefreshRate / 1000, se: isClientSide ? undefined : scheduler.segmentsRefreshRate / 1000, ms: isClientSide ? scheduler.segmentsRefreshRate / 1000 : undefined, + mls: isClientSide && settings.sync.largeSegmentsEnabled ? 
scheduler.largeSegmentsRefreshRate / 1000 : undefined, im: scheduler.impressionsRefreshRate / 1000, ev: scheduler.eventsPushRate / 1000, te: scheduler.telemetryRefreshRate / 1000, diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index 802c5093..8f63f76f 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -177,6 +177,8 @@ export type TelemetryUsageStatsPayload = TelemetryUsageStats & { spC?: number, // splitCount seC?: number, // segmentCount skC?: number, // segmentKeyCount + lseC?: number, // largeSegmentCount + lskC?: number, // largeSegmentKeyCount sL?: number, // sessionLengthMs eQ: number, // eventsQueued eD: number, // eventsDropped @@ -203,6 +205,7 @@ export type RefreshRates = { sp: number, // splits se?: number, // segments ms?: number, // mySegments + mls?: number, // myLargeSegments im: number, // impressions ev: number, // events te: number, // telemetry @@ -228,6 +231,7 @@ export type TelemetryConfigStats = { // 'metrics/config' JSON request body export type TelemetryConfigStatsPayload = TelemetryConfigStats & { sE: boolean, // streamingEnabled + lE?: boolean, // largeSegmentsEnabled rR: RefreshRates, // refreshRates uO: UrlOverrides, // urlOverrides iQ: number, // impressionsQueueSize From a18ff4a43ebf83432bf39cf26049ada41f2916a7 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 15 Jul 2024 18:27:14 -0300 Subject: [PATCH 019/146] Handle largeSegments list in MY_LARGE_SEGMENTS_UPDATE notifications --- src/sync/streaming/SSEHandler/types.ts | 2 +- src/sync/streaming/pushManager.ts | 12 +++++++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/sync/streaming/SSEHandler/types.ts b/src/sync/streaming/SSEHandler/types.ts index dc1b4257..19fb54e4 100644 --- a/src/sync/streaming/SSEHandler/types.ts +++ b/src/sync/streaming/SSEHandler/types.ts @@ -38,7 +38,7 @@ export interface IMySegmentsUpdateV2Data { export interface IMyLargeSegmentsUpdateData { type: MY_LARGE_SEGMENTS_UPDATE, changeNumber: number, - largeSegment: string, + largeSegments: string[], c: Compression, d: string, u: UpdateStrategy, diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 21ed7d0d..dce2cfb9 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -277,7 +277,7 @@ export function pushManagerFactory( if (add !== undefined) { isLS ? workerLarge && workerLarge.put(parsedData.changeNumber, { - name: parsedData.largeSegment, + name: parsedData.largeSegments[0], add }) : worker.put(parsedData.changeNumber, { @@ -289,16 +289,18 @@ export function pushManagerFactory( return; } case UpdateStrategy.SegmentRemoval: - if (!(parsedData as IMySegmentsUpdateV2Data).segmentName && !(parsedData as IMyLargeSegmentsUpdateData).largeSegment) { + if ((isLS && parsedData.largeSegments.length === 0) || (!isLS && !parsedData.segmentName)) { log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['SegmentRemoval', 'No segment name was provided']); break; } forOwn(clients, ({ worker, workerLarge }) => { isLS ? 
- workerLarge && workerLarge.put(parsedData.changeNumber, { - name: parsedData.largeSegment, - add: false + workerLarge && parsedData.largeSegments.forEach(largeSegment => { + workerLarge.put(parsedData.changeNumber, { + name: largeSegment, + add: false + }); }) : worker.put(parsedData.changeNumber, { name: parsedData.segmentName, From cc926589f1ecfa0fcae215363a8b6df39352550b Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 17 Jul 2024 11:07:59 -0300 Subject: [PATCH 020/146] Update SyncManagerOnline and fix test --- src/sync/__tests__/syncManagerOnline.spec.ts | 4 ++- src/sync/syncManagerOnline.ts | 30 +++++++++++++------- 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index 164c93eb..bb0bd72b 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -29,7 +29,9 @@ const pollingManagerMock = { start: jest.fn(), stop: jest.fn(), isRunning: jest.fn(), - add: jest.fn(()=>{return {isrunning: () => true};}), + add: jest.fn(() => ({ + msSyncTask: { isRunning: () => true } + })), get: jest.fn() }; diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index d399e699..0feb53da 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -7,7 +7,7 @@ import { IPollingManager, IPollingManagerCS } from './polling/types'; import { PUSH_SUBSYSTEM_UP, PUSH_SUBSYSTEM_DOWN } from './streaming/constants'; import { SYNC_START_POLLING, SYNC_CONTINUE_POLLING, SYNC_STOP_POLLING } from '../logger/constants'; import { isConsentGranted } from '../consent'; -import { IN_SEGMENT, POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; +import { IN_LARGE_SEGMENT, IN_SEGMENT, POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; import { ISdkFactoryContextSync } from '../sdkFactory/types'; /** @@ -141,36 +141,44 @@ export function syncManagerOnlineFactory( shared(matchingKey: string, readinessManager: IReadinessManager, storage: IStorageSync) { if (!pollingManager) return; - const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).add(matchingKey, readinessManager, storage); + const { msSyncTask, mlsSyncTask } = (pollingManager as IPollingManagerCS).add(matchingKey, readinessManager, storage); return { - isRunning: mySegmentsSyncTask.isRunning, + isRunning: msSyncTask.isRunning, start() { if (syncEnabled) { if (pushManager) { if (pollingManager!.isRunning()) { // if doing polling, we must start the periodic fetch of data - if (storage.splits.usesMatcher(IN_SEGMENT)) mySegmentsSyncTask.start(); + if (storage.splits.usesMatcher(IN_SEGMENT)) msSyncTask.start(); + if (mlsSyncTask && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) mlsSyncTask.start(); } else { // if not polling, we must execute the sync task for the initial fetch // of segments since `syncAll` was already executed when starting the main client - mySegmentsSyncTask.execute(); + msSyncTask.execute(); + mlsSyncTask && mlsSyncTask.execute(); } - pushManager.add(matchingKey, mySegmentsSyncTask); + pushManager.add(matchingKey, msSyncTask); } else { - if (storage.splits.usesMatcher(IN_SEGMENT)) mySegmentsSyncTask.start(); + if (storage.splits.usesMatcher(IN_SEGMENT)) msSyncTask.start(); + if (mlsSyncTask && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) mlsSyncTask.start(); } } else { - if (!readinessManager.isReady()) mySegmentsSyncTask.execute(); + if (!readinessManager.isReady()) { + msSyncTask.execute(); + mlsSyncTask && 
mlsSyncTask.execute(); + } } }, stop() { // check in case `client.destroy()` has been invoked more than once for the same client - const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).get(matchingKey); - if (mySegmentsSyncTask) { + const syncTasks = (pollingManager as IPollingManagerCS).get(matchingKey); + if (syncTasks) { + const { msSyncTask, mlsSyncTask } = syncTasks; // stop syncing if (pushManager) pushManager.remove(matchingKey); - if (mySegmentsSyncTask.isRunning()) mySegmentsSyncTask.stop(); + if (msSyncTask.isRunning()) msSyncTask.stop(); + if (mlsSyncTask && mlsSyncTask.isRunning()) mlsSyncTask.stop(); (pollingManager as IPollingManagerCS).remove(matchingKey); } From a2c68dff36e8f3084c03c757b72ef8e67ad51bad Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 17 Jul 2024 11:51:51 -0300 Subject: [PATCH 021/146] Handle delays for myLargeSegments fetch --- src/sync/polling/types.ts | 2 +- src/sync/polling/updaters/mySegmentsUpdater.ts | 10 ++++++++-- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 6 ++++-- .../__tests__/MySegmentsUpdateWorker.spec.ts | 10 +++++----- src/sync/streaming/pushManager.ts | 17 +++++++++++++---- 5 files changed, 31 insertions(+), 14 deletions(-) diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index d7187c8e..5b7991c2 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -14,7 +14,7 @@ export type MySegmentsData = string[] | { add: boolean } -export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> { } +export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean, delay?: number], boolean> { } export interface IPollingManager extends ITask { syncAll(): Promise diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index a914c676..e409e71e 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -94,8 +94,14 @@ export function mySegmentsUpdaterFactory( * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage. * @param {boolean | undefined} noCache true to revalidate data to fetch */ - return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean) { - return _mySegmentsUpdater(0, segmentsData, noCache); + return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean, delay?: number) { + return delay ? 
+ new Promise(res => { + setTimeout(() => { + _mySegmentsUpdater(0, segmentsData, noCache).then(res); + }, delay); + }) : + _mySegmentsUpdater(0, segmentsData, noCache); }; } diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index c91d8bbe..2b9786b6 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -14,6 +14,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, let handleNewEvent = false; let isHandlingEvent: boolean; let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber + let _delay: undefined | number; const backoff = new Backoff(__handleMySegmentsUpdateCall); function __handleMySegmentsUpdateCall() { @@ -23,7 +24,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, const currentMaxChangeNumber = maxChangeNumber; // fetch mySegments revalidating data if cached - mySegmentsSyncTask.execute(_segmentsData, true).then((result) => { + mySegmentsSyncTask.execute(_segmentsData, true, _delay).then((result) => { if (!isHandlingEvent) return; // halt if `stop` has been called if (result !== false) {// Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType); @@ -47,12 +48,13 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, * @param {number} changeNumber change number of the MY_SEGMENTS_UPDATE notification * @param {SegmentsData | undefined} segmentsData might be undefined */ - put(changeNumber: number, segmentsData?: MySegmentsData) { + put(changeNumber: number, segmentsData?: MySegmentsData, delay?: number) { if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber) return; maxChangeNumber = changeNumber; handleNewEvent = true; _segmentsData = segmentsData; + _delay = delay; if (backoff.timeoutID || !isHandlingEvent) __handleMySegmentsUpdateCall(); backoff.reset(); diff --git a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts index 6726ed86..ccbaa01b 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts @@ -73,7 +73,7 @@ describe('MySegmentsUpdateWorker', () => { mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); // doesn't synchronize MySegments while queue is empty - expect(mySegmentsSyncTask.execute.mock.calls).toEqual([[undefined, true], [undefined, true], [undefined, true]]); + expect(mySegmentsSyncTask.execute.mock.calls).toEqual([[undefined, true, undefined], [undefined, true, undefined], [undefined, true, undefined]]); // assert handling an event with segmentList after an event without segmentList, // to validate the special case than the fetch associated to the first event is resolved after a second event with payload arrives @@ -83,12 +83,12 @@ describe('MySegmentsUpdateWorker', () => { expect(mySegmentsSyncTask.isExecuting()).toBe(true); mySegmentUpdateWorker.put(120, ['some_segment']); 
expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true, even if payload (segmentList) is included - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['some_segment'], true); // synchronizes MySegments with given segmentList + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['some_segment'], true, undefined); // synchronizes MySegments with given segmentList mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); @@ -97,12 +97,12 @@ describe('MySegmentsUpdateWorker', () => { mySegmentUpdateWorker.put(130, ['other_segment']); mySegmentUpdateWorker.put(140); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments once, until event is handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['other_segment'], true); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['other_segment'], true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); // synchronizes MySegments without segmentList if the event doesn't have payload + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); // synchronizes MySegments without segmentList if the event doesn't have payload mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls with backoff to `execute` diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index dce2cfb9..6679df9f 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -16,12 +16,21 @@ import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STRE import { IMyLargeSegmentsUpdateData, IMySegmentsUpdateV2Data, KeyList, UpdateStrategy } from './SSEHandler/types'; import { isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; import { ISet, _Set } from '../../utils/lang/sets'; +import { hash } from '../../utils/murmur3/murmur3'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; import { IUpdateWorker } from './UpdateWorkers/types'; +export function getDelay(parsedData: IMyLargeSegmentsUpdateData, matchingKey: string) { + const interval = parsedData.i || 60000; + // const hashType = parsedData.h || 0; + const seed = parsedData.s || 0; + + return hash(matchingKey, seed) % interval; +} + /** * PushManager factory: * - for server-side if key is not provided in settings. 
@@ -252,10 +261,10 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ hash64, worker, workerLarge }) => { + forOwn(clients, ({ hash64, worker, workerLarge }, matchingKey) => { if (isInBitmap(bitmap, hash64.hex)) { isLS ? - workerLarge && workerLarge.put(parsedData.changeNumber) : + workerLarge && workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : worker.put(parsedData.changeNumber); } }); @@ -311,9 +320,9 @@ export function pushManagerFactory( } // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases - forOwn(clients, ({ worker, workerLarge }) => { + forOwn(clients, ({ worker, workerLarge }, matchingKey) => { isLS ? - workerLarge && workerLarge.put(parsedData.changeNumber) : + workerLarge && workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : worker.put(parsedData.changeNumber); }); } From 9434695dd8ebc3cd4e5b804145ada59c193b0756 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 17 Jul 2024 12:38:43 -0300 Subject: [PATCH 022/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index fe94ba18..81ef37f9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1", + "version": "1.16.1-rc.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1", + "version": "1.16.1-rc.0", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 471e6ae2..e22a46d9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1", + "version": "1.16.1-rc.0", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 3765ec2dfaf36c2c7b9a844da09a5351b77f7a91 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 17 Jul 2024 13:53:30 -0300 Subject: [PATCH 023/146] Update comments --- src/sync/polling/pollingManagerCS.ts | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 890a2dd0..91ce27b2 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -35,11 +35,11 @@ export function pollingManagerCSFactory( forOwn(mySegmentsSyncTasks, ({ msSyncTask, mlsSyncTask }) => { if (splitsHaveSegments) msSyncTask.start(); - else msSyncTask.stop(); + else msSyncTask.stop(); // smart pausing if (mlsSyncTask) { if (splitsHaveLargeSegments) mlsSyncTask.start(); - else mlsSyncTask.stop(); + else mlsSyncTask.stop(); // smart pausing } }); } @@ -51,9 +51,7 @@ export function pollingManagerCSFactory( }); } - // smart pausing readiness.splits.on(SDK_SPLITS_ARRIVED, () => { - // smart pausing of mySegments polling if (splitsSyncTask.isRunning()) startMySegmentsSyncTasks(); }); From 9212f67a4eddf90fcdc3a4436d82af42c8725438 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 17 Jul 2024 15:24:40 -0300 Subject: [PATCH 024/146] Update MySegmentsFetcher to handle /myLargeSegments response payload --- src/dtos/types.ts | 16 ++++++++++------ src/sync/polling/fetchers/mySegmentsFetcher.ts | 8 ++++++-- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/src/dtos/types.ts b/src/dtos/types.ts index 351598cf..dbe181e5 100644 --- a/src/dtos/types.ts +++ 
b/src/dtos/types.ts @@ -224,14 +224,18 @@ export interface ISegmentChangesResponse { till: number } -export interface IMySegmentsResponseItem { - id: string, - name: string -} - /** Interface of the parsed JSON response of `/mySegments/{userKey}` */ export interface IMySegmentsResponse { - mySegments: IMySegmentsResponseItem[] + mySegments: { + id: string, + name: string + }[] +} + +/** Interface of the parsed JSON response of `/myLargeSegments/{userKey}` */ +export interface IMyLargeSegmentsResponse { + myLargeSegments: string[], + changeNumber: number } /** Metadata internal type for storages */ diff --git a/src/sync/polling/fetchers/mySegmentsFetcher.ts b/src/sync/polling/fetchers/mySegmentsFetcher.ts index 498132b0..9b342c75 100644 --- a/src/sync/polling/fetchers/mySegmentsFetcher.ts +++ b/src/sync/polling/fetchers/mySegmentsFetcher.ts @@ -1,5 +1,5 @@ import { IFetchMySegments, IResponse } from '../../../services/types'; -import { IMySegmentsResponseItem } from '../../../dtos/types'; +import { IMySegmentsResponse, IMyLargeSegmentsResponse } from '../../../dtos/types'; import { IMySegmentsFetcher } from './types'; /** @@ -21,7 +21,11 @@ export function mySegmentsFetcherFactory(fetchMySegments: IFetchMySegments): IMy // Extract segment names return mySegmentsPromise .then(resp => resp.json()) - .then(json => json.mySegments.map((segment: IMySegmentsResponseItem) => segment.name)); + .then((json: IMySegmentsResponse | IMyLargeSegmentsResponse) => { + return (json as IMySegmentsResponse).mySegments ? + (json as IMySegmentsResponse).mySegments.map((segment) => segment.name) : + (json as IMyLargeSegmentsResponse).myLargeSegments; + }); }; } From 57fd39bcb1e7081913e2f81f718cf91e66ab2c2e Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 19 Jul 2024 15:45:17 -0300 Subject: [PATCH 025/146] Add unit tests and fix some typos --- ...MENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json | 4 ++++ ...GE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json | 4 ++++ src/storages/pluggable/inMemoryWrapper.ts | 2 +- src/sync/__tests__/syncTask.spec.ts | 2 +- .../streaming/SSEHandler/__tests__/index.spec.ts | 14 +++++++++++++- src/sync/streaming/__tests__/pushManager.spec.ts | 4 ++-- .../submitters/__tests__/eventsSubmitter.spec.ts | 4 ++-- .../__tests__/telemetrySubmitter.spec.ts | 2 +- 8 files changed, 28 insertions(+), 8 deletions(-) create mode 100644 src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json create mode 100644 src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json diff --git a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 00000000..69e900dd --- /dev/null +++ b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552653000,\\\"largeSegments\\\":[\\\"employees\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json new file mode 100644 index 00000000..8563135d --- /dev/null +++ b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": 
"{\"data\":\"{\\\"type\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552650000,\\\"largeSegments\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":0,\\\"s\\\":0}\"}" +} \ No newline at end of file diff --git a/src/storages/pluggable/inMemoryWrapper.ts b/src/storages/pluggable/inMemoryWrapper.ts index c87c9a47..7d8a8837 100644 --- a/src/storages/pluggable/inMemoryWrapper.ts +++ b/src/storages/pluggable/inMemoryWrapper.ts @@ -7,7 +7,7 @@ import { ISet, setToArray, _Set } from '../../utils/lang/sets'; * The `_cache` property is the object were items are stored. * Intended for testing purposes. * - * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves inmediatelly. + * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves immediately. */ export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWrapper & { _cache: Record>, _setConnDelay(connDelay: number): void } { diff --git a/src/sync/__tests__/syncTask.spec.ts b/src/sync/__tests__/syncTask.spec.ts index f2516e8a..5423d387 100644 --- a/src/sync/__tests__/syncTask.spec.ts +++ b/src/sync/__tests__/syncTask.spec.ts @@ -24,7 +24,7 @@ test('syncTaskFactory / start & stop methods for periodic execution', async () = // Calling `start` again has not effect expect(syncTask.start(...startArgs)).toBe(undefined); - // Calling `execute` inmediatelly executes the given task and returns its result + // Calling `execute` immediately executes the given task and returns its result result = await syncTask.execute(3, 4); expect(result).toBe(taskResult); expect(asyncTask).toHaveBeenLastCalledWith(3, 4); diff --git a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts index 9651d232..5c0e7093 100644 --- a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts +++ b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts @@ -1,6 +1,6 @@ // @ts-nocheck import { SSEHandlerFactory } from '..'; -import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, MY_SEGMENTS_UPDATE, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_SEGMENTS_UPDATE_V2, ControlType } from '../../constants'; +import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, MY_SEGMENTS_UPDATE, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_SEGMENTS_UPDATE_V2, MY_LARGE_SEGMENTS_UPDATE, ControlType } from '../../constants'; import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; // update messages @@ -16,6 +16,10 @@ import keylistGzipMessage from '../../../../__tests__/mocks/message.V2.KEYLIST.G import segmentRemovalMessage from '../../../../__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json'; import { keylists, bitmaps } from '../../__tests__/dataMocks'; +// update messages MY_LARGE_SEGMENTS_UPDATE +import largeSegmentUnboundedMessage from '../../../../__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json'; +import largeSegmentRemovalMessage from '../../../../__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; + // occupancy messages import occupancy1ControlPri from '../../../../__tests__/mocks/message.OCCUPANCY.1.control_pri.1586987434450.json'; import occupancy0ControlPri from '../../../../__tests__/mocks/message.OCCUPANCY.0.control_pri.1586987434550.json'; @@ -169,6 +173,14 @@ test('`handlerMessage` for update notifications (NotificationProcessor) and stre 
sseHandler.handleMessage(segmentRemovalMessage); expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expectedParams = [{ type: 'MY_LARGE_SEGMENTS_UPDATE', changeNumber: 1457552650000, c: 0, d: '', u: 0, largeSegments: [], i: 300, h: 0, s: 0 }]; + sseHandler.handleMessage(largeSegmentUnboundedMessage); + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_LARGE_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_LARGE_SEGMENTS_UPDATE with the message parsed data + + expectedParams = [{ type: 'MY_LARGE_SEGMENTS_UPDATE', changeNumber: 1457552653000, c: 0, d: '', u: 3, largeSegments: ['employees'] }]; + sseHandler.handleMessage(largeSegmentRemovalMessage); + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_LARGE_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_LARGE_SEGMENTS_UPDATE with the message parsed data + sseHandler.handleMessage(streamingReset); expect(pushEmitter.emit).toHaveBeenLastCalledWith(ControlType.STREAMING_RESET); // must emit STREAMING_RESET diff --git a/src/sync/streaming/__tests__/pushManager.spec.ts b/src/sync/streaming/__tests__/pushManager.spec.ts index fb5fc39d..7a08bc9b 100644 --- a/src/sync/streaming/__tests__/pushManager.spec.ts +++ b/src/sync/streaming/__tests__/pushManager.spec.ts @@ -33,7 +33,7 @@ test('pushManagerFactory returns undefined if EventSource is not available', () describe('pushManager in client-side', () => { - test('does not connect to streaming if it is stopped inmediatelly after being started', async () => { + test('does not connect to streaming if it is stopped immediately after being started', async () => { const fetchAuthMock = jest.fn(); const pushManager = pushManagerFactory({ // @ts-ignore @@ -122,7 +122,7 @@ describe('pushManager in client-side', () => { describe('pushManager in server-side', () => { - test('does not connect to streaming if it is stopped inmediatelly after being started', async () => { + test('does not connect to streaming if it is stopped immediately after being started', async () => { const fetchAuthMock = jest.fn(); const pushManager = pushManagerFactory({ // @ts-ignore diff --git a/src/sync/submitters/__tests__/eventsSubmitter.spec.ts b/src/sync/submitters/__tests__/eventsSubmitter.spec.ts index d79d0c5e..3da0d1c8 100644 --- a/src/sync/submitters/__tests__/eventsSubmitter.spec.ts +++ b/src/sync/submitters/__tests__/eventsSubmitter.spec.ts @@ -31,7 +31,7 @@ describe('Events submitter', () => { eventsSubmitter.start(); expect(eventsSubmitter.isRunning()).toEqual(true); // Submitter should be flagged as running - expect(eventsSubmitter.isExecuting()).toEqual(false); // but not executed immediatelly if there is a push window + expect(eventsSubmitter.isExecuting()).toEqual(false); // but not executed immediately if there is a push window expect(eventsCacheMock.isEmpty).not.toBeCalled(); // If queue is full, submitter should be executed @@ -55,7 +55,7 @@ describe('Events submitter', () => { eventsSubmitter.start(); expect(eventsSubmitter.isRunning()).toEqual(true); // Submitter should be flagged as running - expect(eventsSubmitter.isExecuting()).toEqual(true); // and executes immediatelly if there isn't a push window + expect(eventsSubmitter.isExecuting()).toEqual(true); // and executes immediately if there isn't a push window expect(eventsCacheMock.isEmpty).toBeCalledTimes(1); // If queue is full, submitter is executed again after current execution is resolved diff --git
a/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts b/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts index ed11ffbb..57a368c5 100644 --- a/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts +++ b/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts @@ -39,7 +39,7 @@ describe('Telemetry submitter', () => { telemetrySubmitter.start(); expect(telemetrySubmitter.isRunning()).toEqual(true); // Submitter should be flagged as running - expect(telemetrySubmitter.isExecuting()).toEqual(false); // but not executed immediatelly (first push window) + expect(telemetrySubmitter.isExecuting()).toEqual(false); // but not executed immediately (first push window) expect(popSpy).toBeCalledTimes(0); // Await first periodic execution From 25180c225e124d4fb6831c2792a29920210812f2 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 19 Jul 2024 15:49:57 -0300 Subject: [PATCH 026/146] Unit test --- src/sync/streaming/__tests__/pushManager.spec.ts | 9 ++++++++- src/sync/streaming/pushManager.ts | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/sync/streaming/__tests__/pushManager.spec.ts b/src/sync/streaming/__tests__/pushManager.spec.ts index 7a08bc9b..e4b86815 100644 --- a/src/sync/streaming/__tests__/pushManager.spec.ts +++ b/src/sync/streaming/__tests__/pushManager.spec.ts @@ -3,7 +3,7 @@ import { fullSettings, fullSettingsServerSide } from '../../../utils/settingsVal import { syncTaskFactory } from '../../__tests__/syncTask.mock'; // Test target -import { pushManagerFactory } from '../pushManager'; +import { pushManagerFactory, getDelay } from '../pushManager'; import { IPushManager } from '../types'; const paramsMock = { @@ -192,3 +192,10 @@ describe('pushManager in server-side', () => { }); }); + +test('getDelay', () => { + expect(getDelay({ i: 300, h: 0, s: 0 }, 'nicolas@split.io')).toBe(241); + expect(getDelay({ i: 60000, h: 0, s: 1 }, 'emi@split.io')).toBe(14389); + expect(getDelay({ i: 60000, h: 0, s: 0 }, 'emi@split.io')).toBe(24593); + expect(getDelay({}, 'emi@split.io')).toBe(24593); +}); diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 6679df9f..61a8e3a5 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -23,7 +23,7 @@ import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../ import { ISdkFactoryContextSync } from '../../sdkFactory/types'; import { IUpdateWorker } from './UpdateWorkers/types'; -export function getDelay(parsedData: IMyLargeSegmentsUpdateData, matchingKey: string) { +export function getDelay(parsedData: Pick, matchingKey: string) { const interval = parsedData.i || 60000; // const hashType = parsedData.h || 0; const seed = parsedData.s || 0; From 4374f944a523bb50dfc6d63d761e617363562981 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 19 Jul 2024 16:52:02 -0300 Subject: [PATCH 027/146] Handle delay in UpdateWorker so that the timer can be cleared when synchronization is stopped --- src/sync/polling/types.ts | 2 +- src/sync/polling/updaters/mySegmentsUpdater.ts | 10 ++-------- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 12 +++++++++++- .../__tests__/MySegmentsUpdateWorker.spec.ts | 10 +++++----- src/sync/streaming/pushManager.ts | 1 - 5 files changed, 19 insertions(+), 16 deletions(-) diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 5b7991c2..d7187c8e 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -14,7 +14,7 @@ export type MySegmentsData = string[] | { add: boolean } 
-export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean, delay?: number], boolean> { } +export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> { } export interface IPollingManager extends ITask { syncAll(): Promise diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index e409e71e..a914c676 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -94,14 +94,8 @@ export function mySegmentsUpdaterFactory( * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage. * @param {boolean | undefined} noCache true to revalidate data to fetch */ - return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean, delay?: number) { - return delay ? - new Promise(res => { - setTimeout(() => { - _mySegmentsUpdater(0, segmentsData, noCache).then(res); - }, delay); - }) : - _mySegmentsUpdater(0, segmentsData, noCache); + return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean) { + return _mySegmentsUpdater(0, segmentsData, noCache); }; } diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index 2b9786b6..4ea9c36b 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -15,6 +15,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, let isHandlingEvent: boolean; let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber let _delay: undefined | number; + let _delayTimeoutID: undefined | number; const backoff = new Backoff(__handleMySegmentsUpdateCall); function __handleMySegmentsUpdateCall() { @@ -24,7 +25,15 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, const currentMaxChangeNumber = maxChangeNumber; // fetch mySegments revalidating data if cached - mySegmentsSyncTask.execute(_segmentsData, true, _delay).then((result) => { + const syncTask = _delay ? + new Promise(res => { + _delayTimeoutID = setTimeout(() => { + mySegmentsSyncTask.execute(_segmentsData, true).then(res); + }, _delay); + }) : + mySegmentsSyncTask.execute(_segmentsData, true); + + syncTask.then((result) => { if (!isHandlingEvent) return; // halt if `stop` has been called if (result !== false) {// Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. 
if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType); @@ -61,6 +70,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, }, stop() { + clearTimeout(_delayTimeoutID); isHandlingEvent = false; backoff.reset(); } diff --git a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts index ccbaa01b..6726ed86 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts @@ -73,7 +73,7 @@ describe('MySegmentsUpdateWorker', () => { mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); // doesn't synchronize MySegments while queue is empty - expect(mySegmentsSyncTask.execute.mock.calls).toEqual([[undefined, true, undefined], [undefined, true, undefined], [undefined, true, undefined]]); + expect(mySegmentsSyncTask.execute.mock.calls).toEqual([[undefined, true], [undefined, true], [undefined, true]]); // assert handling an event with segmentList after an event without segmentList, // to validate the special case than the fetch associated to the first event is resolved after a second event with payload arrives @@ -83,12 +83,12 @@ describe('MySegmentsUpdateWorker', () => { expect(mySegmentsSyncTask.isExecuting()).toBe(true); mySegmentUpdateWorker.put(120, ['some_segment']); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true, even if payload (segmentList) is included - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['some_segment'], true, undefined); // synchronizes MySegments with given segmentList + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['some_segment'], true); // synchronizes MySegments with given segmentList mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); @@ -97,12 +97,12 @@ describe('MySegmentsUpdateWorker', () => { mySegmentUpdateWorker.put(130, ['other_segment']); mySegmentUpdateWorker.put(140); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments once, until event is handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['other_segment'], true, undefined); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['other_segment'], true); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); // synchronizes MySegments without segmentList if the event doesn't have payload + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); // synchronizes MySegments without segmentList if the event doesn't have payload 
mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls with backoff to `execute` diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 61a8e3a5..580185c5 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -25,7 +25,6 @@ import { IUpdateWorker } from './UpdateWorkers/types'; export function getDelay(parsedData: Pick, matchingKey: string) { const interval = parsedData.i || 60000; - // const hashType = parsedData.h || 0; const seed = parsedData.s || 0; return hash(matchingKey, seed) % interval; From 4b6676fe9646b9d6914be7779e49952b818afa83 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 19 Jul 2024 17:54:13 -0300 Subject: [PATCH 028/146] Readiness event fixes --- .../__tests__/readinessManager.spec.ts | 25 ++++++++----------- src/readiness/readinessManager.ts | 8 +++--- src/storages/types.ts | 2 +- src/sync/polling/pollingManagerCS.ts | 23 +++++++++-------- .../polling/syncTasks/mySegmentsSyncTask.ts | 12 ++++++--- .../polling/updaters/mySegmentsUpdater.ts | 9 ++++--- 6 files changed, 40 insertions(+), 39 deletions(-) diff --git a/src/readiness/__tests__/readinessManager.spec.ts b/src/readiness/__tests__/readinessManager.spec.ts index fafa4019..a935fd50 100644 --- a/src/readiness/__tests__/readinessManager.spec.ts +++ b/src/readiness/__tests__/readinessManager.spec.ts @@ -281,21 +281,16 @@ test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { }); test('READINESS MANAGER / with large segments', () => { - const readinessManager = readinessManagerFactory(EventEmitter, { - startup: { readyTimeout: 0, waitForLargeSegments: false }, - sync: { largeSegmentsEnabled: true } - } as unknown as ISettings); - expect(readinessManager.largeSegments).toBeUndefined(); + [true, false].forEach(waitForLargeSegments => { - const readinessManagerWithLargeSegments = readinessManagerFactory(EventEmitter, { - startup: { readyTimeout: 0, waitForLargeSegments: true }, - sync: { largeSegmentsEnabled: true } - } as unknown as ISettings); + const rm = readinessManagerFactory(EventEmitter, { + startup: { readyTimeout: 0, waitForLargeSegments }, + sync: { largeSegmentsEnabled: true } + } as unknown as ISettings); - expect(readinessManagerWithLargeSegments.largeSegments).toBeDefined(); + expect(rm.largeSegments).toBeDefined(); - [readinessManager, readinessManagerWithLargeSegments].forEach(rm => { let counter = 0; rm.gate.on(SDK_READY, () => { @@ -305,10 +300,10 @@ test('READINESS MANAGER / with large segments', () => { rm.splits.emit(SDK_SPLITS_ARRIVED); rm.segments.emit(SDK_SEGMENTS_ARRIVED); - if (rm.largeSegments) { - expect(counter).toBe(0); // should not be called yet - rm.largeSegments.emit(SDK_SEGMENTS_ARRIVED); - } + + expect(counter).toBe(waitForLargeSegments ? 
0 : 1); // should be called if waitForLargeSegments is false + rm.largeSegments!.emit(SDK_SEGMENTS_ARRIVED); + expect(counter).toBe(1); // should be called rm.splits.emit(SDK_SPLITS_ARRIVED); diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index a4a5290d..400c8cca 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -18,10 +18,8 @@ function splitsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISpli return splitsEventEmitter; } -function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISegmentsEventEmitter { - const segmentsEventEmitter = objectAssign(new EventEmitter(), { - segmentsArrived: false - }); +function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter, segmentsArrived = false): ISegmentsEventEmitter { + const segmentsEventEmitter = objectAssign(new EventEmitter(), { segmentsArrived }); segmentsEventEmitter.once(SDK_SEGMENTS_ARRIVED, () => { segmentsEventEmitter.segmentsArrived = true; }); @@ -39,7 +37,7 @@ export function readinessManagerFactory( const { startup: { readyTimeout, waitForLargeSegments }, sync: { largeSegmentsEnabled } } = settings; const segments: ISegmentsEventEmitter = segmentsEventEmitterFactory(EventEmitter); - const largeSegments = largeSegmentsEnabled && waitForLargeSegments ? segmentsEventEmitterFactory(EventEmitter) : undefined; + const largeSegments = largeSegmentsEnabled ? segmentsEventEmitterFactory(EventEmitter, !waitForLargeSegments) : undefined; const gate: IReadinessEventEmitter = new EventEmitter(); // emit SDK_READY_FROM_CACHE diff --git a/src/storages/types.ts b/src/storages/types.ts index 10e83604..ed3e6b35 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -204,7 +204,7 @@ export interface ISplitsCacheBase { getSplitNames(): MaybeThenable, // should never reject or throw an exception. Instead return true by default, asssuming the TT might exist. trafficTypeExists(trafficType: string): MaybeThenable, - // only for Client-Side + // only for Client-Side. Returns true if the storage is not synchronized yet (getChangeNumber() === 1) or contains a FF using the given matcher usesMatcher(matcherType: string): MaybeThenable, clear(): MaybeThenable, // should never reject or throw an exception. Instead return false by default, to avoid emitting SDK_READY_FROM_CACHE. 
diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 91ce27b2..fbb8a223 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -58,38 +58,39 @@ export function pollingManagerCSFactory( function add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync) { const msSyncTask = mySegmentsSyncTaskFactory( splitApi.fetchMySegments, + () => storage.splits.usesMatcher(IN_SEGMENT), storage.segments, - () => { - if (storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); - }, + readiness.segments, settings, matchingKey, - settings.scheduler.segmentsRefreshRate + settings.scheduler.segmentsRefreshRate, + 'mySegmentsUpdater' ); let mlsSyncTask; if (settings.sync.largeSegmentsEnabled) { mlsSyncTask = mySegmentsSyncTaskFactory( splitApi.fetchMyLargeSegments, + () => storage.splits.usesMatcher(IN_LARGE_SEGMENT), storage.largeSegments!, - () => { - if (readiness.largeSegments && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED); - }, + readiness.largeSegments!, settings, matchingKey, - settings.scheduler.largeSegmentsRefreshRate + settings.scheduler.largeSegmentsRefreshRate, + 'myLargeSegmentsUpdater' ); } // smart ready function smartReady() { if (!readiness.isReady()) { - if (!storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); if (readiness.largeSegments && !storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED); + if (!storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); } } - if (!storage.splits.usesMatcher(IN_SEGMENT) && !storage.splits.usesMatcher(IN_LARGE_SEGMENT)) setTimeout(smartReady, 0); - else readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); + + if (storage.splits.usesMatcher(IN_SEGMENT) && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); + else setTimeout(smartReady, 0); mySegmentsSyncTasks[matchingKey] = { msSyncTask: msSyncTask, mlsSyncTask: mlsSyncTask }; diff --git a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts index f9c54ff0..194b181e 100644 --- a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +++ b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts @@ -5,30 +5,34 @@ import { IFetchMySegments } from '../../../services/types'; import { mySegmentsFetcherFactory } from '../fetchers/mySegmentsFetcher'; import { ISettings } from '../../../types'; import { mySegmentsUpdaterFactory } from '../updaters/mySegmentsUpdater'; +import { ISegmentsEventEmitter } from '../../../readiness/types'; /** * Creates a sync task that periodically executes a `mySegmentsUpdater` task */ export function mySegmentsSyncTaskFactory( fetchMySegments: IFetchMySegments, + shouldNotify: () => boolean, mySegmentsCache: ISegmentsCacheSync, - notifyUpdate: () => void, + segmentEventEmitter: ISegmentsEventEmitter, settings: ISettings, matchingKey: string, - segmentsRefreshRate: number + segmentsRefreshRate: number, + NAME: string ): IMySegmentsSyncTask { return syncTaskFactory( settings.log, mySegmentsUpdaterFactory( settings.log, mySegmentsFetcherFactory(fetchMySegments), + shouldNotify, mySegmentsCache, - notifyUpdate, + segmentEventEmitter, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, matchingKey ), segmentsRefreshRate, - 'mySegmentsUpdater', + NAME, ); } diff --git 
a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index a914c676..d7bbdafe 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -1,6 +1,8 @@ import { IMySegmentsFetcher } from '../fetchers/types'; import { ISegmentsCacheSync } from '../../../storages/types'; +import { ISegmentsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; +import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; @@ -16,8 +18,9 @@ type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => P export function mySegmentsUpdaterFactory( log: ILogger, mySegmentsFetcher: IMySegmentsFetcher, + shouldNotify: () => boolean, mySegmentsCache: ISegmentsCacheSync, - notifyUpdate: () => void, + segmentsEventEmitter: ISegmentsEventEmitter, requestTimeoutBeforeReady: number, retriesOnFailureBeforeReady: number, matchingKey: string @@ -52,9 +55,9 @@ export function mySegmentsUpdaterFactory( } // Notify update if required - if (shouldNotifyUpdate || readyOnAlreadyExistentState) { + if (shouldNotify() && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { readyOnAlreadyExistentState = false; - notifyUpdate(); + segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED); } } From e6cb5c3c32a9c5b4fb43e4b90f4c99b90180c8f9 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 19 Jul 2024 18:03:17 -0300 Subject: [PATCH 029/146] Refactor to simplify some code --- src/sync/polling/pollingManagerCS.ts | 6 ++---- src/sync/polling/syncTasks/mySegmentsSyncTask.ts | 7 ++----- src/sync/polling/updaters/mySegmentsUpdater.ts | 9 +++------ 3 files changed, 7 insertions(+), 15 deletions(-) diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index fbb8a223..a2d6ef7f 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -58,9 +58,8 @@ export function pollingManagerCSFactory( function add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync) { const msSyncTask = mySegmentsSyncTaskFactory( splitApi.fetchMySegments, - () => storage.splits.usesMatcher(IN_SEGMENT), storage.segments, - readiness.segments, + () => { if (storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); }, settings, matchingKey, settings.scheduler.segmentsRefreshRate, @@ -71,9 +70,8 @@ export function pollingManagerCSFactory( if (settings.sync.largeSegmentsEnabled) { mlsSyncTask = mySegmentsSyncTaskFactory( splitApi.fetchMyLargeSegments, - () => storage.splits.usesMatcher(IN_LARGE_SEGMENT), storage.largeSegments!, - readiness.largeSegments!, + () => { if (readiness.largeSegments && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED); }, settings, matchingKey, settings.scheduler.largeSegmentsRefreshRate, diff --git a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts index 194b181e..4bf16e46 100644 --- a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +++ b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts @@ -5,16 +5,14 @@ import { IFetchMySegments } from '../../../services/types'; import { mySegmentsFetcherFactory } from '../fetchers/mySegmentsFetcher'; import { ISettings } from '../../../types'; import { 
mySegmentsUpdaterFactory } from '../updaters/mySegmentsUpdater'; -import { ISegmentsEventEmitter } from '../../../readiness/types'; /** * Creates a sync task that periodically executes a `mySegmentsUpdater` task */ export function mySegmentsSyncTaskFactory( fetchMySegments: IFetchMySegments, - shouldNotify: () => boolean, mySegmentsCache: ISegmentsCacheSync, - segmentEventEmitter: ISegmentsEventEmitter, + notifyUpdate: () => void, settings: ISettings, matchingKey: string, segmentsRefreshRate: number, @@ -25,9 +23,8 @@ export function mySegmentsSyncTaskFactory( mySegmentsUpdaterFactory( settings.log, mySegmentsFetcherFactory(fetchMySegments), - shouldNotify, mySegmentsCache, - segmentEventEmitter, + notifyUpdate, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, matchingKey diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index d7bbdafe..a914c676 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -1,8 +1,6 @@ import { IMySegmentsFetcher } from '../fetchers/types'; import { ISegmentsCacheSync } from '../../../storages/types'; -import { ISegmentsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; -import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; @@ -18,9 +16,8 @@ type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => P export function mySegmentsUpdaterFactory( log: ILogger, mySegmentsFetcher: IMySegmentsFetcher, - shouldNotify: () => boolean, mySegmentsCache: ISegmentsCacheSync, - segmentsEventEmitter: ISegmentsEventEmitter, + notifyUpdate: () => void, requestTimeoutBeforeReady: number, retriesOnFailureBeforeReady: number, matchingKey: string @@ -55,9 +52,9 @@ export function mySegmentsUpdaterFactory( } // Notify update if required - if (shouldNotify() && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { + if (shouldNotifyUpdate || readyOnAlreadyExistentState) { readyOnAlreadyExistentState = false; - segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED); + notifyUpdate(); } } From d2f42e81ae866bc945433d699598912e728750a4 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 19 Jul 2024 18:03:52 -0300 Subject: [PATCH 030/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 81ef37f9..d0b33b85 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.0", + "version": "1.16.1-rc.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.0", + "version": "1.16.1-rc.1", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index e22a46d9..25759e5d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.0", + "version": "1.16.1-rc.1", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From cd00e3fe198e405bfefb37c65ce41f43ad587243 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 23 Jul 2024 10:50:19 -0300 Subject: [PATCH 031/146] 
Fix typos --- .github/workflows/ci.yml | 2 +- src/logger/messages/info.ts | 2 +- .../UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 181fa4af..4ecfe2ab 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,7 @@ on: jobs: build: - name: Build + name: CI runs-on: ubuntu-latest services: redis: diff --git a/src/logger/messages/info.ts b/src/logger/messages/info.ts index 64e0e64a..23e659fe 100644 --- a/src/logger/messages/info.ts +++ b/src/logger/messages/info.ts @@ -22,7 +22,7 @@ export const codesInfo: [number, string][] = codesWarn.concat([ [c.POLLING_START, c.LOG_PREFIX_SYNC_POLLING + 'Starting polling'], [c.POLLING_STOP, c.LOG_PREFIX_SYNC_POLLING + 'Stopping polling'], [c.SYNC_SPLITS_FETCH_RETRY, c.LOG_PREFIX_SYNC_SPLITS + 'Retrying download of feature flags #%s. Reason: %s'], - [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and reseting timer.'], + [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and resetting timer.'], [c.SUBMITTERS_PUSH, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Pushing %s.'], [c.STREAMING_REFRESH_TOKEN, c.LOG_PREFIX_SYNC_STREAMING + 'Refreshing streaming token in %s seconds, and connecting streaming in %s seconds.'], [c.STREAMING_RECONNECT, c.LOG_PREFIX_SYNC_STREAMING + 'Attempting to reconnect streaming in %s seconds.'], diff --git a/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts index af5c6336..99c61bae 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts @@ -206,7 +206,7 @@ describe('SplitsUpdateWorker', () => { splitUpdateWorker.stop(); - await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after reseting + await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after resetting expect(splitsSyncTask.execute).toBeCalledTimes(1); }); From 284b125baa0f89cf430124fdd691f999d59ff043 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 23 Jul 2024 11:37:19 -0300 Subject: [PATCH 032/146] Update types --- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 2 +- .../UpdateWorkers/SegmentsUpdateWorker.ts | 2 +- .../UpdateWorkers/SplitsUpdateWorker.ts | 2 +- src/sync/streaming/UpdateWorkers/types.ts | 4 ++-- src/sync/streaming/parseUtils.ts | 16 ++++++---------- src/sync/streaming/pushManager.ts | 3 +-- 6 files changed, 12 insertions(+), 17 deletions(-) diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index 4ea9c36b..f0706ccf 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -7,7 +7,7 @@ import { UpdatesFromSSEEnum } from '../../submitters/types'; /** * MySegmentsUpdateWorker factory */ -export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker { +export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]> { let maxChangeNumber = 0; // 
keeps the maximum changeNumber among queued events let currentChangeNumber = -1; diff --git a/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts index 93454f0e..8ca485cd 100644 --- a/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts @@ -9,7 +9,7 @@ import { IUpdateWorker } from './types'; /** * SegmentsUpdateWorker factory */ -export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker { +export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker<[ISegmentUpdateData]> { // Handles retries with CDN bypass per segment name function SegmentUpdateWorker(segment: string) { diff --git a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts index e9336aba..a01ed94a 100644 --- a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts @@ -14,7 +14,7 @@ import { IUpdateWorker } from './types'; /** * SplitsUpdateWorker factory */ -export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker & { killSplit(event: ISplitKillData): void } { +export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData, payload?: ISplit]> & { killSplit(event: ISplitKillData): void } { let maxChangeNumber = 0; let handleNewEvent = false; diff --git a/src/sync/streaming/UpdateWorkers/types.ts b/src/sync/streaming/UpdateWorkers/types.ts index cd899aa9..6e3066b7 100644 --- a/src/sync/streaming/UpdateWorkers/types.ts +++ b/src/sync/streaming/UpdateWorkers/types.ts @@ -1,4 +1,4 @@ -export interface IUpdateWorker { +export interface IUpdateWorker { stop(): void // clear scheduled tasks (backoff) - put(...args: any[]): void // handle new update event + put(...args: T): void // handle new update event } diff --git a/src/sync/streaming/parseUtils.ts b/src/sync/streaming/parseUtils.ts index 83fca519..b34cfa2c 100644 --- a/src/sync/streaming/parseUtils.ts +++ b/src/sync/streaming/parseUtils.ts @@ -1,6 +1,7 @@ import { algorithms } from '../../utils/decompress'; import { decodeFromBase64 } from '../../utils/base64'; import { Compression, KeyList } from './SSEHandler/types'; +import { ISplit } from '../../dtos/types'; const GZIP = 1; const ZLIB = 2; @@ -42,7 +43,7 @@ function decompress(data: string, compression: Compression) { * @returns {{a?: string[], r?: string[] }} * @throws if data string cannot be decoded, decompressed or parsed */ -export function parseKeyList(data: string, compression: Compression, avoidPrecisionLoss: boolean = true): KeyList { +export function parseKeyList(data: string, compression: Compression, avoidPrecisionLoss = true): KeyList { const binKeyList = decompress(data, compression); let strKeyList = Uint8ArrayToString(binKeyList); // replace numbers to strings, to avoid losing precision @@ -80,14 +81,9 @@ export function isInBitmap(bitmap: Uint8Array, hash64hex: string) { /** * Parse feature flags notifications for 
instant feature flag updates - * - * @param {ISplitUpdateData} data - * @returns {KeyList} */ -export function parseFFUpdatePayload(compression: Compression, data: string): KeyList | undefined { - const avoidPrecisionLoss = false; - if (compression > 0) - return parseKeyList(data, compression, avoidPrecisionLoss); - else - return JSON.parse(decodeFromBase64(data)); +export function parseFFUpdatePayload(compression: Compression, data: string): ISplit | undefined { + return compression > 0 ? + parseKeyList(data, compression, false) : + JSON.parse(decodeFromBase64(data)); } diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 580185c5..5d03fb39 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -21,7 +21,6 @@ import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; -import { IUpdateWorker } from './UpdateWorkers/types'; export function getDelay(parsedData: Pick, matchingKey: string) { const interval = parsedData.i || 60000; @@ -72,7 +71,7 @@ export function pushManagerFactory( const userKeyHashes: Record = {}; // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers. // Hash64 is used to process MY_SEGMENTS_UPDATE_V2 events and dispatch actions to the corresponding MySegmentsUpdateWorker. - const clients: Record = {}; + const clients: Record, workerLarge?: ReturnType }> = {}; // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming. let connectForNewClient = false; From c075f6b96411fd4ac95ae15b625c9fd672e62823 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 23 Jul 2024 14:26:34 -0300 Subject: [PATCH 033/146] Handle MY_LARGE_SEGMENTS_UPDATE segment removal event with multiple segment names --- src/sync/polling/types.ts | 2 +- .../polling/updaters/mySegmentsUpdater.ts | 24 +++++++++---------- src/sync/streaming/pushManager.ts | 22 ++++++++--------- 3 files changed, 23 insertions(+), 25 deletions(-) diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index d7187c8e..efc7d5d9 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -12,7 +12,7 @@ export type MySegmentsData = string[] | { name: string, /* action: `true` for add, and `false` for delete */ add: boolean -} +}[] export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> { } diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index a914c676..c988ef2c 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -4,6 +4,7 @@ import { timeout } from '../../../utils/promise/timeout'; import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; +import { isObject } from '../../../utils/lang'; type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => Promise @@ -36,19 +37,18 @@ export function mySegmentsUpdaterFactory( function updateSegments(segmentsData: MySegmentsData) { let shouldNotifyUpdate; - if (Array.isArray(segmentsData)) { - // Update the list of segment names available - shouldNotifyUpdate = 
mySegmentsCache.resetSegments(segmentsData); + if (isObject(segmentsData[0])) { + // Add/Delete the segment names + (segmentsData as { name: string, add: boolean }[]).forEach(({ name, add }) => { + if (mySegmentsCache.isInSegment(name) !== add) { + shouldNotifyUpdate = true; + if (add) mySegmentsCache.addToSegment(name); + else mySegmentsCache.removeFromSegment(name); + } + }); } else { - // Add/Delete the segment - const { name, add } = segmentsData; - if (mySegmentsCache.isInSegment(name) !== add) { - shouldNotifyUpdate = true; - if (add) mySegmentsCache.addToSegment(name); - else mySegmentsCache.removeFromSegment(name); - } else { - shouldNotifyUpdate = false; - } + // Reset the list of segment names + shouldNotifyUpdate = mySegmentsCache.resetSegments(segmentsData as string[]); } // Notify update if required diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 5d03fb39..f89db08f 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -283,14 +283,14 @@ export function pushManagerFactory( const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; if (add !== undefined) { isLS ? - workerLarge && workerLarge.put(parsedData.changeNumber, { + workerLarge && workerLarge.put(parsedData.changeNumber, [{ name: parsedData.largeSegments[0], add - }) : - worker.put(parsedData.changeNumber, { + }]) : + worker.put(parsedData.changeNumber, [{ name: parsedData.segmentName, add - }); + }]); } }); return; @@ -303,16 +303,14 @@ export function pushManagerFactory( forOwn(clients, ({ worker, workerLarge }) => { isLS ? - workerLarge && parsedData.largeSegments.forEach(largeSegment => { - workerLarge.put(parsedData.changeNumber, { - name: largeSegment, - add: false - }); - }) : - worker.put(parsedData.changeNumber, { + workerLarge && workerLarge.put(parsedData.changeNumber, parsedData.largeSegments.map(largeSegment => ({ + name: largeSegment, + add: false + }))) : + worker.put(parsedData.changeNumber, [{ name: parsedData.segmentName, add: false - }); + }]); }); return; } From f9ea1ca1f0a349acb81c80db385b017159649635 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 23 Jul 2024 18:18:07 -0300 Subject: [PATCH 034/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index d0b33b85..79105462 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.1", + "version": "1.16.1-rc.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.1", + "version": "1.16.1-rc.2", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 25759e5d..bfe6be0d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.1", + "version": "1.16.1-rc.2", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 86af495eb0d55fd92c48cac12ad4e1abbe929501 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 23 Jul 2024 21:58:21 -0300 Subject: [PATCH 035/146] rc with fixed type --- package-lock.json | 4 ++-- package.json | 2 +- src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 
79105462..30350cad 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.2", + "version": "1.16.1-rc.3", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.2", + "version": "1.16.1-rc.3", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index bfe6be0d..8e646a33 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.2", + "version": "1.16.1-rc.3", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index f0706ccf..64511b79 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -15,7 +15,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, let isHandlingEvent: boolean; let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber let _delay: undefined | number; - let _delayTimeoutID: undefined | number; + let _delayTimeoutID: any; const backoff = new Backoff(__handleMySegmentsUpdateCall); function __handleMySegmentsUpdateCall() { From 2a5a2ca81b6af128dd8b9d804692d90280328a2e Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 25 Jul 2024 18:32:47 +0100 Subject: [PATCH 036/146] Handle some corner cases in MySegmentsUpdateWorker --- package-lock.json | 4 +- package.json | 2 +- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 11 +++-- .../__tests__/MySegmentsUpdateWorker.spec.ts | 40 ++++++++++++++++++- 4 files changed, 48 insertions(+), 9 deletions(-) diff --git a/package-lock.json b/package-lock.json index 30350cad..67ca48ca 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.3", + "version": "1.16.1-rc.4", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.3", + "version": "1.16.1-rc.4", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 8e646a33..2c3e67aa 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.3", + "version": "1.16.1-rc.4", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index 64511b79..5a912abf 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -28,6 +28,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, const syncTask = _delay ? 
new Promise(res => { _delayTimeoutID = setTimeout(() => { + _delay = undefined; mySegmentsSyncTask.execute(_segmentsData, true).then(res); }, _delay); }) : @@ -52,13 +53,15 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, return { /** - * Invoked by NotificationProcessor on MY_SEGMENTS_UPDATE event + * Invoked by NotificationProcessor on MY_(LARGE)_SEGMENTS_UPDATE notifications * - * @param {number} changeNumber change number of the MY_SEGMENTS_UPDATE notification - * @param {SegmentsData | undefined} segmentsData might be undefined + * @param changeNumber change number of the notification + * @param segmentsData data for KeyList or SegmentRemoval instant updates + * @param delay optional time to wait for BoundedFetchRequest or BoundedFetchRequest updates */ put(changeNumber: number, segmentsData?: MySegmentsData, delay?: number) { - if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber) return; + // Ignore event if it is outdated or if there is a pending fetch request (_delay is set) + if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber || _delay) return; maxChangeNumber = changeNumber; handleNewEvent = true; diff --git a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts index 6726ed86..bcce94f3 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts @@ -128,15 +128,51 @@ describe('MySegmentsUpdateWorker', () => { test('stop', async () => { // setup const mySegmentsSyncTask = mySegmentsSyncTaskMock([false]); - Backoff.__TEST__BASE_MILLIS = 1; const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); mySegmentUpdateWorker.put(100); + mySegmentUpdateWorker.stop(); + + await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after stopping + expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); + mySegmentUpdateWorker.put(100, undefined, 10); mySegmentUpdateWorker.stop(); - await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after reseting + await new Promise(res => setTimeout(res, 20)); // Wait to assert no calls to `execute` after stopping (fetch request with delay is cleared) expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); }); + test('put with delay', async () => { + // setup + const mySegmentsSyncTask = mySegmentsSyncTaskMock(); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); + + // If a delayed fetch request is queued while another fetch request is waiting, it is discarded + mySegmentUpdateWorker.put(100, undefined, 50); + mySegmentUpdateWorker.put(150, undefined, 100); + + await new Promise(res => setTimeout(res, 60)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); + mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success + + await new Promise(res => setTimeout(res, 60)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); + + // If an event with segmentData (i.e., an instant update) is queued while a delayed fetch request is waiting, the instant update is discarded + mySegmentUpdateWorker.put(200, undefined, 50); + await new Promise(res => setTimeout(res, 10)); + mySegmentUpdateWorker.put(230, ['some_segment']); + + await new 
Promise(res => setTimeout(res, 60)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); + mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success + await new Promise(res => setTimeout(res)); + + mySegmentUpdateWorker.put(250, ['some_segment']); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['some_segment'], true); + }); }); From 271303c78f591e26b7d60ce71faaf0377d5972f1 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 25 Jul 2024 19:36:53 +0100 Subject: [PATCH 037/146] Polishing --- src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts | 1 + .../UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index 5a912abf..42dd4dc6 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -74,6 +74,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, stop() { clearTimeout(_delayTimeoutID); + _delay = undefined; isHandlingEvent = false; backoff.reset(); } diff --git a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts index bcce94f3..a374edec 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts @@ -136,7 +136,7 @@ describe('MySegmentsUpdateWorker', () => { await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after stopping expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); - mySegmentUpdateWorker.put(100, undefined, 10); + mySegmentUpdateWorker.put(150, undefined, 10); mySegmentUpdateWorker.stop(); await new Promise(res => setTimeout(res, 20)); // Wait to assert no calls to `execute` after stopping (fetch request with delay is cleared) From b7fe46fca57171a34fd9a6e65eb4360695efe058 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 25 Jul 2024 19:58:18 +0100 Subject: [PATCH 038/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- src/sync/streaming/SSEClient/index.ts | 10 ++++------ 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/package-lock.json b/package-lock.json index 67ca48ca..1011ba70 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.4", + "version": "1.16.1-rc.5", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.4", + "version": "1.16.1-rc.5", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 2c3e67aa..514d3a3d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.4", + "version": "1.16.1-rc.5", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/sync/streaming/SSEClient/index.ts b/src/sync/streaming/SSEClient/index.ts index cb6c0a14..6615bfbf 100644 --- a/src/sync/streaming/SSEClient/index.ts +++ b/src/sync/streaming/SSEClient/index.ts @@ -76,12 +76,10 @@ export 
class SSEClient implements ISSEClient { open(authToken: IAuthTokenPushEnabled) { this.close(); // it closes connection if previously opened - const channelsQueryParam = Object.keys(authToken.channels).map( - function (channel) { - const params = CONTROL_CHANNEL_REGEX.test(channel) ? '[?occupancy=metrics.publishers]' : ''; - return encodeURIComponent(params + channel); - } - ).join(','); + const channelsQueryParam = Object.keys(authToken.channels).map((channel) => { + const params = CONTROL_CHANNEL_REGEX.test(channel) ? '[?occupancy=metrics.publishers]' : ''; + return encodeURIComponent(params + channel); + }).join(','); const url = `${this.streamingUrl}?channels=${channelsQueryParam}&accessToken=${authToken.token}&v=${ABLY_API_VERSION}&heartbeats=true`; // same results using `&heartbeats=false` this.connection = new this.eventSource!( From 293a8245201f6cbdbe1c9f829b979a154f7f453e Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 30 Jul 2024 20:40:24 +0100 Subject: [PATCH 039/146] Filter myLargeSegments channels if largeSegmentsEnabled is false --- .../SSEClient/__tests__/index.spec.ts | 37 +++++++++++++++---- src/sync/streaming/SSEClient/index.ts | 7 +++- 2 files changed, 36 insertions(+), 8 deletions(-) diff --git a/src/sync/streaming/SSEClient/__tests__/index.spec.ts b/src/sync/streaming/SSEClient/__tests__/index.spec.ts index 757229b9..8187e969 100644 --- a/src/sync/streaming/SSEClient/__tests__/index.spec.ts +++ b/src/sync/streaming/SSEClient/__tests__/index.spec.ts @@ -16,18 +16,18 @@ const EXPECTED_HEADERS = { SplitSDKVersion: settings.version }; -test('SSClient / instance creation throws error if EventSource is not provided', () => { +test('SSEClient / instance creation throws error if EventSource is not provided', () => { expect(() => { new SSEClient(settings); }).toThrow(Error); expect(() => { new SSEClient(settings, false, {}); }).toThrow(Error); expect(() => { new SSEClient(settings, false, { getEventSource: () => undefined }); }).toThrow(Error); }); -test('SSClient / instance creation success if EventSource is provided', () => { +test('SSEClient / instance creation success if EventSource is provided', () => { const instance = new SSEClient(settings, false, { getEventSource: () => EventSourceMock }); expect(instance.eventSource).toBe(EventSourceMock); }); -test('SSClient / setEventHandler, open and close methods', () => { +test('SSEClient / setEventHandler, open and close methods', () => { // instance event handler const handler = { handleOpen: jest.fn(), @@ -80,7 +80,7 @@ test('SSClient / setEventHandler, open and close methods', () => { }); -test('SSClient / open method: URL with metadata query params', () => { +test('SSEClient / open method: URL with metadata query params', () => { const instance = new SSEClient(settings, false, { getEventSource: () => EventSourceMock }); instance.open(authDataSample); @@ -91,7 +91,7 @@ test('SSClient / open method: URL with metadata query params', () => { expect(instance.connection.__eventSourceInitDict).toEqual({}); // No headers are passed for streaming connection }); -test('SSClient / open method: URL and metadata headers with IP and Hostname', () => { +test('SSEClient / open method: URL and metadata headers with IP and Hostname', () => { const settingsWithRuntime = { ...settings, @@ -113,7 +113,7 @@ test('SSClient / open method: URL and metadata headers with IP and Hostname', () }); // Headers are properly set for streaming connection }); -test('SSClient / open method: URL and metadata headers without IP and Hostname', () => { 
+test('SSEClient / open method: URL and metadata headers without IP and Hostname', () => { const instance = new SSEClient(settings, true, { getEventSource: () => EventSourceMock }); instance.open(authDataSample); @@ -122,7 +122,7 @@ test('SSClient / open method: URL and metadata headers without IP and Hostname', expect(instance.connection.__eventSourceInitDict).toEqual({ headers: EXPECTED_HEADERS }); // Headers are properly set for streaming connection }); -test('SSClient / open method: URL, metadata headers and options', () => { +test('SSEClient / open method: URL, metadata headers and options', () => { const platform = { getEventSource: jest.fn(() => EventSourceMock), getOptions: jest.fn(() => ({ withCredentials: true })) }; const instance = new SSEClient(settings, true, platform); @@ -135,3 +135,26 @@ test('SSClient / open method: URL, metadata headers and options', () => { expect(platform.getEventSource.mock.calls).toEqual([[settings]]); expect(platform.getOptions.mock.calls).toEqual([[settings]]); }); + +test('SSEClient / open method: largeSegmentsEnabled true', () => { + const authDataWithMyLargeSegmentsChannel = { + ...authDataSample, + channels: { ...authDataSample.channels, 'NzM2MDI5Mzc0_MzQyODU4NDUyNg==_myLargeSegments': ['subscribe'] }, + }; + + let instance = new SSEClient({ + ...settings, + sync: { largeSegmentsEnabled: false } + }, true, { getEventSource: () => EventSourceMock }); + + instance.open(authDataWithMyLargeSegmentsChannel); + expect(instance.connection.url).toBe(EXPECTED_URL); + + instance = new SSEClient({ + ...settings, + sync: { largeSegmentsEnabled: true } + }, true, { getEventSource: () => EventSourceMock }); + + instance.open(authDataWithMyLargeSegmentsChannel); + expect(instance.connection.url).toBe(EXPECTED_URL.replace('&accessToken', ',NzM2MDI5Mzc0_MzQyODU4NDUyNg%3D%3D_myLargeSegments&accessToken')); +}); diff --git a/src/sync/streaming/SSEClient/index.ts b/src/sync/streaming/SSEClient/index.ts index 6615bfbf..97218524 100644 --- a/src/sync/streaming/SSEClient/index.ts +++ b/src/sync/streaming/SSEClient/index.ts @@ -1,7 +1,7 @@ import { IPlatform } from '../../../sdkFactory/types'; import { IEventSourceConstructor } from '../../../services/types'; import { ISettings } from '../../../types'; -import { isString } from '../../../utils/lang'; +import { endsWith, isString } from '../../../utils/lang'; import { objectAssign } from '../../../utils/lang/objectAssign'; import { IAuthTokenPushEnabled } from '../AuthClient/types'; import { ISSEClient, ISseEventHandler } from './types'; @@ -42,6 +42,7 @@ export class SSEClient implements ISSEClient { useHeaders?: boolean; headers: Record; options?: object; + lse?: boolean; /** * SSEClient constructor. @@ -61,6 +62,7 @@ export class SSEClient implements ISSEClient { this.useHeaders = useHeaders; this.headers = buildSSEHeaders(settings); this.options = getOptions && getOptions(settings); + this.lse = settings.sync.largeSegmentsEnabled; } setEventHandler(handler: ISseEventHandler) { @@ -79,7 +81,10 @@ export class SSEClient implements ISSEClient { const channelsQueryParam = Object.keys(authToken.channels).map((channel) => { const params = CONTROL_CHANNEL_REGEX.test(channel) ? 
'[?occupancy=metrics.publishers]' : ''; return encodeURIComponent(params + channel); + }).filter(channel => { + return this.lse || !endsWith(channel, 'myLargeSegments'); }).join(','); + const url = `${this.streamingUrl}?channels=${channelsQueryParam}&accessToken=${authToken.token}&v=${ABLY_API_VERSION}&heartbeats=true`; // same results using `&heartbeats=false` this.connection = new this.eventSource!( From 5dda9de9d23f6acd19fd4a279a4914fb82996d37 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 30 Jul 2024 20:41:56 +0100 Subject: [PATCH 040/146] Update default FLAG_SPEC_VERSION to 1.2 --- src/utils/constants/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/utils/constants/index.ts b/src/utils/constants/index.ts index 41a14e0a..e11a2d77 100644 --- a/src/utils/constants/index.ts +++ b/src/utils/constants/index.ts @@ -106,7 +106,7 @@ export const DISABLED = 0; export const ENABLED = 1; export const PAUSED = 2; -export const FLAG_SPEC_VERSION = '1.1'; +export const FLAG_SPEC_VERSION = '1.2'; // Matcher types export const IN_SEGMENT = 'IN_SEGMENT'; From fbd7ee93b74d9a38f9b97c589d879b033fa26ab8 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 30 Jul 2024 20:42:19 +0100 Subject: [PATCH 041/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 1011ba70..c1e5b86d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.5", + "version": "1.16.1-rc.6", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.5", + "version": "1.16.1-rc.6", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 514d3a3d..9988d119 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.5", + "version": "1.16.1-rc.6", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 12c7233bae01ca9e6fb1de06cc4ac4abe5a9efb7 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 31 Jul 2024 16:36:07 +0100 Subject: [PATCH 042/146] Update test --- src/storages/__tests__/KeyBuilder.spec.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/storages/__tests__/KeyBuilder.spec.ts b/src/storages/__tests__/KeyBuilder.spec.ts index 890ca61a..e0494ec9 100644 --- a/src/storages/__tests__/KeyBuilder.spec.ts +++ b/src/storages/__tests__/KeyBuilder.spec.ts @@ -116,16 +116,16 @@ test('KEYS / latency and exception keys (telemetry)', () => { test('getStorageHash', () => { expect(getStorageHash({ core: { authorizationKey: '' }, - sync: { __splitFiltersValidation: { queryString: '&names=p1__split,p2__split' }, flagSpecVersion: '1.1' } - } as ISettings)).toBe('fdf7bd89'); + sync: { __splitFiltersValidation: { queryString: '&names=p1__split,p2__split' }, flagSpecVersion: '1.2' } + } as ISettings)).toBe('7ccd6b31'); expect(getStorageHash({ core: { authorizationKey: '' }, - sync: { __splitFiltersValidation: { queryString: '&names=p2__split,p3__split' }, flagSpecVersion: '1.1' } - } as ISettings)).toBe('ee4ec91'); + sync: { __splitFiltersValidation: { queryString: '&names=p2__split,p3__split' }, flagSpecVersion: '1.2' } + } as ISettings)).toBe('2a25d0e1'); expect(getStorageHash({ core: { authorizationKey: '' }, - sync: { 
__splitFiltersValidation: { queryString: null }, flagSpecVersion: '1.1' } - } as ISettings)).toBe('2a2c20bb'); + sync: { __splitFiltersValidation: { queryString: null }, flagSpecVersion: '1.2' } + } as ISettings)).toBe('db8943b4'); }); From 50e00b60800423166db86028a4a57ac15c141eda Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 1 Aug 2024 18:56:46 +0100 Subject: [PATCH 043/146] Updates in telemetry --- package-lock.json | 4 ++-- package.json | 2 +- src/sync/submitters/telemetrySubmitter.ts | 6 ++++-- src/sync/submitters/types.ts | 3 ++- 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/package-lock.json b/package-lock.json index c1e5b86d..c4d82dc1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.6", + "version": "1.16.1-rc.7", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.6", + "version": "1.16.1-rc.7", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 9988d119..efc151a1 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.6", + "version": "1.16.1-rc.7", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/sync/submitters/telemetrySubmitter.ts b/src/sync/submitters/telemetrySubmitter.ts index 5b5d09bb..881cb5ed 100644 --- a/src/sync/submitters/telemetrySubmitter.ts +++ b/src/sync/submitters/telemetrySubmitter.ts @@ -71,17 +71,19 @@ export function telemetryCacheConfigAdapter(telemetry: ITelemetryCacheSync, sett pop(): TelemetryConfigStatsPayload { const { urls, scheduler } = settings; const isClientSide = settings.core.key !== undefined; + const largeSegmentsEnabled = isClientSide && settings.sync.largeSegmentsEnabled; const { flagSetsTotal, flagSetsIgnored } = getTelemetryFlagSetsStats(settings.sync.__splitFiltersValidation); return objectAssign(getTelemetryConfigStats(settings.mode, settings.storage.type), { sE: settings.streamingEnabled, - lE: isClientSide ? settings.sync.largeSegmentsEnabled : undefined, + lsE: largeSegmentsEnabled ? largeSegmentsEnabled : undefined, + wls: largeSegmentsEnabled ? settings.startup.waitForLargeSegments : undefined, rR: { sp: scheduler.featuresRefreshRate / 1000, se: isClientSide ? undefined : scheduler.segmentsRefreshRate / 1000, ms: isClientSide ? scheduler.segmentsRefreshRate / 1000 : undefined, - mls: isClientSide && settings.sync.largeSegmentsEnabled ? scheduler.largeSegmentsRefreshRate / 1000 : undefined, + mls: largeSegmentsEnabled ? 
scheduler.largeSegmentsRefreshRate / 1000 : undefined, im: scheduler.impressionsRefreshRate / 1000, ev: scheduler.eventsPushRate / 1000, te: scheduler.telemetryRefreshRate / 1000, diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index 8f63f76f..72e91b9d 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -231,7 +231,8 @@ export type TelemetryConfigStats = { // 'metrics/config' JSON request body export type TelemetryConfigStatsPayload = TelemetryConfigStats & { sE: boolean, // streamingEnabled - lE?: boolean, // largeSegmentsEnabled + lsE?: boolean, // largeSegmentsEnabled + wls?: boolean, // waitForLargeSegments rR: RefreshRates, // refreshRates uO: UrlOverrides, // urlOverrides iQ: number, // impressionsQueueSize From dcd20bddae9e78514af3d9bcbc40302a76163388 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 6 Aug 2024 17:43:41 +0100 Subject: [PATCH 044/146] Specialize a few argument types to avoid TS errors --- src/services/splitApi.ts | 2 +- src/services/splitHttpClient.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index 51f1a02a..f7e8857b 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -22,7 +22,7 @@ function userKeyToQueryParam(userKey: string) { */ export function splitApiFactory( settings: ISettings, - platform: IPlatform, + platform: Pick, telemetryTracker: ITelemetryTracker ): ISplitApi { diff --git a/src/services/splitHttpClient.ts b/src/services/splitHttpClient.ts index 75369efc..c53d4b7c 100644 --- a/src/services/splitHttpClient.ts +++ b/src/services/splitHttpClient.ts @@ -12,7 +12,7 @@ const messageNoFetch = 'Global fetch API is not available.'; * @param settings SDK settings, used to access authorizationKey, logger instance and metadata (SDK version, ip and hostname) to set additional headers * @param platform object containing environment-specific dependencies */ -export function splitHttpClientFactory(settings: ISettings, { getOptions, getFetch }: IPlatform): ISplitHttpClient { +export function splitHttpClientFactory(settings: ISettings, { getOptions, getFetch }: Pick): ISplitHttpClient { const { log, core: { authorizationKey }, version, runtime: { ip, hostname } } = settings; const options = getOptions && getOptions(settings); From 9d21c55148b5f2859214218f9794d1b832440844 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 6 Aug 2024 17:45:28 +0100 Subject: [PATCH 045/146] Handle logic for 'h' none --- ...ge.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json | 2 +- src/sync/streaming/SSEHandler/__tests__/index.spec.ts | 2 +- src/sync/streaming/SSEHandler/types.ts | 2 +- src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts | 2 +- src/sync/streaming/__tests__/pushManager.spec.ts | 6 +++--- src/sync/streaming/pushManager.ts | 2 ++ 6 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json index 8563135d..f6fa5982 100644 --- a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json +++ b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json @@ -1,4 +1,4 @@ { "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552650000,\\\"largeSegments\\\":[],\\\"c\\\": 0,\\\"u\\\": 
0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":0,\\\"s\\\":0}\"}" + "data": "{\"data\":\"{\\\"type\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552650000,\\\"largeSegments\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" } \ No newline at end of file diff --git a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts index 5c0e7093..75034c4c 100644 --- a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts +++ b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts @@ -173,7 +173,7 @@ test('`handlerMessage` for update notifications (NotificationProcessor) and stre sseHandler.handleMessage(segmentRemovalMessage); expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data - expectedParams = [{ type: 'MY_LARGE_SEGMENTS_UPDATE', changeNumber: 1457552650000, c: 0, d: '', u: 0, largeSegments: [], i: 300, h: 0, s: 0 }]; + expectedParams = [{ type: 'MY_LARGE_SEGMENTS_UPDATE', changeNumber: 1457552650000, c: 0, d: '', u: 0, largeSegments: [], i: 300, h: 1, s: 0 }]; sseHandler.handleMessage(largeSegmentUnboundedMessage); expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_LARGE_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data diff --git a/src/sync/streaming/SSEHandler/types.ts b/src/sync/streaming/SSEHandler/types.ts index 19fb54e4..08e7b72a 100644 --- a/src/sync/streaming/SSEHandler/types.ts +++ b/src/sync/streaming/SSEHandler/types.ts @@ -43,7 +43,7 @@ export interface IMyLargeSegmentsUpdateData { d: string, u: UpdateStrategy, i?: number, // time interval in millis - h?: number, // hash function. 0 for murmur3_32, 1 for murmur3_64 + h?: number, // hash function s?: number, // seed for hash function } diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index 42dd4dc6..aa8d6482 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -36,7 +36,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, syncTask.then((result) => { if (!isHandlingEvent) return; // halt if `stop` has been called - if (result !== false) {// Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. + if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType); currentChangeNumber = Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. 
} diff --git a/src/sync/streaming/__tests__/pushManager.spec.ts b/src/sync/streaming/__tests__/pushManager.spec.ts index e4b86815..83ffb106 100644 --- a/src/sync/streaming/__tests__/pushManager.spec.ts +++ b/src/sync/streaming/__tests__/pushManager.spec.ts @@ -194,8 +194,8 @@ describe('pushManager in server-side', () => { }); test('getDelay', () => { - expect(getDelay({ i: 300, h: 0, s: 0 }, 'nicolas@split.io')).toBe(241); - expect(getDelay({ i: 60000, h: 0, s: 1 }, 'emi@split.io')).toBe(14389); - expect(getDelay({ i: 60000, h: 0, s: 0 }, 'emi@split.io')).toBe(24593); + expect(getDelay({ i: 300, h: 1, s: 0 }, 'nicolas@split.io')).toBe(241); + expect(getDelay({ i: 60000, h: 1, s: 1 }, 'emi@split.io')).toBe(14389); + expect(getDelay({ i: 60000, h: 1, s: 0 }, 'emi@split.io')).toBe(24593); expect(getDelay({}, 'emi@split.io')).toBe(24593); }); diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index f89db08f..f8363034 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -23,6 +23,8 @@ import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../ import { ISdkFactoryContextSync } from '../../sdkFactory/types'; export function getDelay(parsedData: Pick, matchingKey: string) { + if (parsedData.h === 0) return 0; + const interval = parsedData.i || 60000; const seed = parsedData.s || 0; From b952f32090fada1e6b3b0494b489a37206f7adb9 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 6 Aug 2024 17:46:02 +0100 Subject: [PATCH 046/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index c4d82dc1..b87c6245 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.7", + "version": "1.16.1-rc.8", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.7", + "version": "1.16.1-rc.8", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index efc151a1..d5fb0eeb 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.7", + "version": "1.16.1-rc.8", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 9052572d277e0dce020e069aa1e3924753addb72 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 7 Aug 2024 14:46:23 +0000 Subject: [PATCH 047/146] Update IMyLargeSegmentsResponse type --- src/dtos/types.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dtos/types.ts b/src/dtos/types.ts index dbe181e5..15f7f8de 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -235,7 +235,7 @@ export interface IMySegmentsResponse { /** Interface of the parsed JSON response of `/myLargeSegments/{userKey}` */ export interface IMyLargeSegmentsResponse { myLargeSegments: string[], - changeNumber: number + till: number } /** Metadata internal type for storages */ From 5f6074ca21723d307009de1f5816c1dc5787c572 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 21 Aug 2024 00:53:54 +0200 Subject: [PATCH 048/146] Remove LS configs and unify endpoint --- package-lock.json | 4 +- package.json | 2 +- src/dtos/types.ts | 12 +-- src/logger/constants.ts | 1 + src/logger/messages/info.ts | 1 + .../__tests__/readinessManager.spec.ts | 44 +-------- 
src/readiness/readinessManager.ts | 14 ++- src/readiness/types.ts | 1 - src/services/__tests__/splitApi.spec.ts | 7 +- src/services/splitApi.ts | 7 +- src/services/types.ts | 1 - src/storages/AbstractSplitsCacheAsync.ts | 4 +- src/storages/AbstractSplitsCacheSync.ts | 10 ++- src/storages/KeyBuilder.ts | 3 - .../inLocalStorage/SplitsCacheInLocal.ts | 25 ++---- .../__tests__/SplitsCacheInLocal.spec.ts | 15 ++-- src/storages/inMemory/SplitsCacheInMemory.ts | 20 ++--- src/storages/types.ts | 11 +-- src/sync/__tests__/syncManagerOnline.spec.ts | 6 +- .../polling/fetchers/mySegmentsFetcher.ts | 11 +-- src/sync/polling/fetchers/types.ts | 4 +- src/sync/polling/pollingManagerCS.ts | 90 ++++++------------- .../polling/syncTasks/mySegmentsSyncTask.ts | 19 ++-- src/sync/polling/types.ts | 13 +-- .../polling/updaters/mySegmentsUpdater.ts | 29 +++--- .../SSEClient/__tests__/index.spec.ts | 23 ----- src/sync/streaming/SSEClient/index.ts | 7 +- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 6 +- src/sync/streaming/pushManager.ts | 36 ++++---- src/sync/submitters/telemetrySubmitter.ts | 4 - src/sync/submitters/types.ts | 8 +- src/sync/syncManagerOnline.ts | 30 +++---- src/types.ts | 27 +----- src/utils/constants/index.ts | 1 - .../__tests__/index.spec.ts | 3 +- .../__tests__/settings.mocks.ts | 7 +- src/utils/settingsValidation/index.ts | 6 +- 37 files changed, 159 insertions(+), 353 deletions(-) diff --git a/package-lock.json b/package-lock.json index b87c6245..8019d143 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.8", + "version": "1.16.1-rc.9", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.8", + "version": "1.16.1-rc.9", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index d5fb0eeb..f999f4e6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.8", + "version": "1.16.1-rc.9", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/dtos/types.ts b/src/dtos/types.ts index 15f7f8de..7a71311d 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -227,15 +227,11 @@ export interface ISegmentChangesResponse { /** Interface of the parsed JSON response of `/mySegments/{userKey}` */ export interface IMySegmentsResponse { mySegments: { - id: string, + id?: string, name: string - }[] -} - -/** Interface of the parsed JSON response of `/myLargeSegments/{userKey}` */ -export interface IMyLargeSegmentsResponse { - myLargeSegments: string[], - till: number + }[], + myLargeSegments?: string[], + till?: number } /** Metadata internal type for storages */ diff --git a/src/logger/constants.ts b/src/logger/constants.ts index d7adf667..36f6a139 100644 --- a/src/logger/constants.ts +++ b/src/logger/constants.ts @@ -36,6 +36,7 @@ export const IMPRESSION = 102; export const IMPRESSION_QUEUEING = 103; export const NEW_SHARED_CLIENT = 104; export const NEW_FACTORY = 105; +export const POLLING_SMART_PAUSING = 106; export const POLLING_START = 107; export const POLLING_STOP = 108; export const SYNC_SPLITS_FETCH_RETRY = 109; diff --git a/src/logger/messages/info.ts b/src/logger/messages/info.ts index 23e659fe..94333bf6 100644 --- a/src/logger/messages/info.ts +++ b/src/logger/messages/info.ts @@ -19,6 +19,7 @@ export const codesInfo: [number, string][] = 
codesWarn.concat([ [c.USER_CONSENT_INITIAL, 'Starting the SDK with %s user consent. No data will be sent.'], // synchronizer + [c.POLLING_SMART_PAUSING, c.LOG_PREFIX_SYNC_POLLING + 'Turning segments data polling %s.'], [c.POLLING_START, c.LOG_PREFIX_SYNC_POLLING + 'Starting polling'], [c.POLLING_STOP, c.LOG_PREFIX_SYNC_POLLING + 'Stopping polling'], [c.SYNC_SPLITS_FETCH_RETRY, c.LOG_PREFIX_SYNC_SPLITS + 'Retrying download of feature flags #%s. Reason: %s'], diff --git a/src/readiness/__tests__/readinessManager.spec.ts b/src/readiness/__tests__/readinessManager.spec.ts index a935fd50..b8dc4846 100644 --- a/src/readiness/__tests__/readinessManager.spec.ts +++ b/src/readiness/__tests__/readinessManager.spec.ts @@ -7,20 +7,14 @@ import { ISettings } from '../../types'; const settings = { startup: { readyTimeout: 0, - waitForLargeSegments: false - }, - sync: { - largeSegmentEnabled: false } } as unknown as ISettings; const settingsWithTimeout = { - ...settings, startup: { - ...settings.startup, readyTimeout: 50 } -}; +} as unknown as ISettings; const statusFlagsCount = 5; @@ -279,39 +273,3 @@ test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { }, settingsWithTimeout.startup.readyTimeout * 1.5); }); - -test('READINESS MANAGER / with large segments', () => { - - [true, false].forEach(waitForLargeSegments => { - - const rm = readinessManagerFactory(EventEmitter, { - startup: { readyTimeout: 0, waitForLargeSegments }, - sync: { largeSegmentsEnabled: true } - } as unknown as ISettings); - - expect(rm.largeSegments).toBeDefined(); - - let counter = 0; - - rm.gate.on(SDK_READY, () => { - expect(rm.isReady()).toBe(true); - counter++; - }); - - rm.splits.emit(SDK_SPLITS_ARRIVED); - rm.segments.emit(SDK_SEGMENTS_ARRIVED); - - expect(counter).toBe(waitForLargeSegments ? 
0 : 1); // should be called if waitForLargeSegments is false - rm.largeSegments!.emit(SDK_SEGMENTS_ARRIVED); - - expect(counter).toBe(1); // should be called - - rm.splits.emit(SDK_SPLITS_ARRIVED); - rm.segments.emit(SDK_SEGMENTS_ARRIVED); - rm.splits.emit(SDK_SPLITS_ARRIVED); - rm.segments.emit(SDK_SEGMENTS_ARRIVED); - if (rm.largeSegments) rm.largeSegments.emit(SDK_SEGMENTS_ARRIVED); - - expect(counter).toBe(1); // should be called once - }); -}); diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index 400c8cca..1257c3a6 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -18,8 +18,10 @@ function splitsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISpli return splitsEventEmitter; } -function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter, segmentsArrived = false): ISegmentsEventEmitter { - const segmentsEventEmitter = objectAssign(new EventEmitter(), { segmentsArrived }); +function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISegmentsEventEmitter { + const segmentsEventEmitter = objectAssign(new EventEmitter(), { + segmentsArrived: false + }); segmentsEventEmitter.once(SDK_SEGMENTS_ARRIVED, () => { segmentsEventEmitter.segmentsArrived = true; }); @@ -34,10 +36,9 @@ export function readinessManagerFactory( settings: ISettings, splits: ISplitsEventEmitter = splitsEventEmitterFactory(EventEmitter)): IReadinessManager { - const { startup: { readyTimeout, waitForLargeSegments }, sync: { largeSegmentsEnabled } } = settings; + const readyTimeout = settings.startup.readyTimeout; const segments: ISegmentsEventEmitter = segmentsEventEmitterFactory(EventEmitter); - const largeSegments = largeSegmentsEnabled ? segmentsEventEmitterFactory(EventEmitter, !waitForLargeSegments) : undefined; const gate: IReadinessEventEmitter = new EventEmitter(); // emit SDK_READY_FROM_CACHE @@ -63,7 +64,6 @@ export function readinessManagerFactory( let isReady = false; splits.on(SDK_SPLITS_ARRIVED, checkIsReadyOrUpdate); segments.on(SDK_SEGMENTS_ARRIVED, checkIsReadyOrUpdate); - if (largeSegments) largeSegments.on(SDK_SEGMENTS_ARRIVED, checkIsReadyOrUpdate); let isDestroyed = false; @@ -89,7 +89,7 @@ export function readinessManagerFactory( setTimeout(() => { throw e; }, 0); } } else { - if (splits.splitsArrived && segments.segmentsArrived && (!largeSegments || largeSegments.segmentsArrived)) { + if (splits.splitsArrived && segments.segmentsArrived) { clearTimeout(readyTimeoutId); isReady = true; try { @@ -107,7 +107,6 @@ export function readinessManagerFactory( return { splits, segments, - largeSegments, gate, shared() { @@ -126,7 +125,6 @@ export function readinessManagerFactory( isDestroyed = true; segments.removeAllListeners(); - if (largeSegments) largeSegments.removeAllListeners(); gate.removeAllListeners(); clearTimeout(readyTimeoutId); diff --git a/src/readiness/types.ts b/src/readiness/types.ts index c5cd9b0f..21793d97 100644 --- a/src/readiness/types.ts +++ b/src/readiness/types.ts @@ -45,7 +45,6 @@ export interface IReadinessManager { /** Event emitters */ splits: ISplitsEventEmitter, segments: ISegmentsEventEmitter, - largeSegments?: ISegmentsEventEmitter, // Undefined if largeSegmentsEnabled or waitForLargeSegments are false gate: IReadinessEventEmitter, /** Readiness status */ diff --git a/src/services/__tests__/splitApi.spec.ts b/src/services/__tests__/splitApi.spec.ts index d5550f93..07d6dec4 100644 --- a/src/services/__tests__/splitApi.spec.ts +++ 
b/src/services/__tests__/splitApi.spec.ts @@ -60,12 +60,7 @@ describe('splitApi', () => { splitApi.postMetricsUsage('fake-body'); assertHeaders(settings, fetchMock.mock.calls[8][1].headers); - splitApi.fetchMyLargeSegments('userKey'); - [url, { headers }] = fetchMock.mock.calls[9]; - assertHeaders(settings, headers); - expect(url).toBe('sdk/myLargeSegments/userKey'); - - expect(telemetryTrackerMock.trackHttp).toBeCalledTimes(10); + expect(telemetryTrackerMock.trackHttp).toBeCalledTimes(9); telemetryTrackerMock.trackHttp.mockClear(); fetchMock.mockClear(); diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index f7e8857b..a5526081 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -4,7 +4,7 @@ import { splitHttpClientFactory } from './splitHttpClient'; import { ISplitApi } from './types'; import { objectAssign } from '../utils/lang/objectAssign'; import { ITelemetryTracker } from '../trackers/types'; -import { SPLITS, IMPRESSIONS, IMPRESSIONS_COUNT, EVENTS, TELEMETRY, TOKEN, SEGMENT, MY_SEGMENT, MY_LARGE_SEGMENT } from '../utils/constants'; +import { SPLITS, IMPRESSIONS, IMPRESSIONS_COUNT, EVENTS, TELEMETRY, TOKEN, SEGMENT, MY_SEGMENT } from '../utils/constants'; import { ERROR_TOO_MANY_SETS } from '../logger/constants'; const noCacheHeaderOptions = { headers: { 'Cache-Control': 'no-cache' } }; @@ -78,11 +78,6 @@ export function splitApiFactory( return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MY_SEGMENT)); }, - fetchMyLargeSegments(userMatchingKey: string, noCache?: boolean) { - const url = `${urls.sdk}/myLargeSegments/${encodeURIComponent(userMatchingKey)}`; - return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MY_LARGE_SEGMENT)); - }, - /** * Post events. * diff --git a/src/services/types.ts b/src/services/types.ts index a5dfde2a..116ccec5 100644 --- a/src/services/types.ts +++ b/src/services/types.ts @@ -62,7 +62,6 @@ export interface ISplitApi { fetchSplitChanges: IFetchSplitChanges fetchSegmentChanges: IFetchSegmentChanges fetchMySegments: IFetchMySegments - fetchMyLargeSegments: IFetchMySegments postEventsBulk: IPostEventsBulk postUniqueKeysBulkCs: IPostUniqueKeysBulkCs postUniqueKeysBulkSs: IPostUniqueKeysBulkSs diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts index 4abb6e34..9e4e136c 100644 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ b/src/storages/AbstractSplitsCacheAsync.ts @@ -22,9 +22,9 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { abstract trafficTypeExists(trafficType: string): Promise abstract clear(): Promise - // @TODO revisit segment-related methods ('usesMatcher', 'getRegisteredSegments', 'registerSegments') + // @TODO revisit segment-related methods ('usesSegments', 'getRegisteredSegments', 'registerSegments') // noop, just keeping the interface. This is used by standalone client-side API only, and so only implemented by InMemory and InLocalStorage. 
- usesMatcher(): Promise { + usesSegments(): Promise { return Promise.resolve(true); } diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index 1d33eb93..461056ff 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -2,6 +2,7 @@ import { ISplitsCacheSync } from './types'; import { ISplit } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; import { ISet } from '../utils/lang/sets'; +import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; /** * This class provides a skeletal implementation of the ISplitsCacheSync interface @@ -43,7 +44,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { abstract trafficTypeExists(trafficType: string): boolean - abstract usesMatcher(matcherType: string): boolean + abstract usesSegments(): boolean abstract clear(): void @@ -85,15 +86,16 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { /** * Given a parsed split, it returns a boolean flagging if its conditions use segments matchers (rules & whitelists). - * This util is intended to simplify the implementation of `splitsCache::usesMatcher` method + * This util is intended to simplify the implementation of `splitsCache::usesSegments` method */ -export function usesMatcher(split: ISplit, matcherType: string) { +export function usesSegments(split: ISplit) { const conditions = split.conditions || []; for (let i = 0; i < conditions.length; i++) { const matchers = conditions[i].matcherGroup.matchers; for (let j = 0; j < matchers.length; j++) { - if (matchers[j].matcherType === matcherType) return true; + const matcher = matchers[j].matcherType; + if (matcher === IN_SEGMENT || matcher === IN_LARGE_SEGMENT) return true; } } diff --git a/src/storages/KeyBuilder.ts b/src/storages/KeyBuilder.ts index 60ab7af8..e70b251b 100644 --- a/src/storages/KeyBuilder.ts +++ b/src/storages/KeyBuilder.ts @@ -51,9 +51,6 @@ export class KeyBuilder { buildSplitsWithSegmentCountKey() { return `${this.prefix}.splits.usingSegments`; } - buildSplitsWithLargeSegmentCountKey() { - return `${this.prefix}.splits.usingLargeSegments`; - } buildSegmentNameKey(segmentName: string) { return `${this.prefix}.segment.${segmentName}`; diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index 30f47f67..ccd4859f 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -1,5 +1,5 @@ import { ISplit } from '../../dtos/types'; -import { AbstractSplitsCacheSync, usesMatcher } from '../AbstractSplitsCacheSync'; +import { AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; import { isFiniteNumber, toNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilderCS } from '../KeyBuilderCS'; import { ILogger } from '../../logger/types'; @@ -7,7 +7,6 @@ import { LOG_PREFIX } from './constants'; import { ISet, _Set, setToArray } from '../../utils/lang/sets'; import { ISettings } from '../../types'; import { getStorageHash } from '../KeyBuilder'; -import { IN_LARGE_SEGMENT, IN_SEGMENT } from '../../utils/constants'; /** * ISplitsCacheSync implementation that stores split definitions in browser LocalStorage. 
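For reference, a minimal standalone sketch of the segment-usage check performed by the `usesSegments` helper in the `AbstractSplitsCacheSync` hunk above. It is illustrative only and not part of the patch: the trimmed `MiniSplit` shape and the literal matcher strings stand in for the SDK's `ISplit` DTO and its `IN_SEGMENT` / `IN_LARGE_SEGMENT` constants.

    type MiniMatcherType = 'IN_SEGMENT' | 'IN_LARGE_SEGMENT' | 'WHITELIST';

    interface MiniSplit {
      conditions: { matcherGroup: { matchers: { matcherType: MiniMatcherType }[] } }[];
    }

    // Any standard- or large-segment matcher marks the split as "using segments",
    // which is what the single `splits.usingSegments` counter in the LocalStorage cache tracks.
    function usesSegmentsSketch(split: MiniSplit): boolean {
      return (split.conditions || []).some(({ matcherGroup }) =>
        matcherGroup.matchers.some(({ matcherType }) =>
          matcherType === 'IN_SEGMENT' || matcherType === 'IN_LARGE_SEGMENT'));
    }

    usesSegmentsSketch({ conditions: [{ matcherGroup: { matchers: [{ matcherType: 'IN_LARGE_SEGMENT' }] } }] }); // true
    usesSegmentsSketch({ conditions: [{ matcherGroup: { matchers: [{ matcherType: 'WHITELIST' }] } }] });        // false
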
@@ -51,15 +50,10 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); this._decrementCount(ttKey); - if (usesMatcher(split, IN_SEGMENT)) { + if (usesSegments(split)) { const segmentsCountKey = this.keys.buildSplitsWithSegmentCountKey(); this._decrementCount(segmentsCountKey); } - - if (usesMatcher(split, IN_LARGE_SEGMENT)) { - const segmentsCountKey = this.keys.buildSplitsWithLargeSegmentCountKey(); - this._decrementCount(segmentsCountKey); - } } } catch (e) { this.log.error(LOG_PREFIX + e); @@ -73,17 +67,11 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { // @ts-expect-error localStorage.setItem(ttKey, toNumber(localStorage.getItem(ttKey)) + 1); - if (usesMatcher(split, IN_SEGMENT)) { + if (usesSegments(split)) { const segmentsCountKey = this.keys.buildSplitsWithSegmentCountKey(); // @ts-expect-error localStorage.setItem(segmentsCountKey, toNumber(localStorage.getItem(segmentsCountKey)) + 1); } - - if (usesMatcher(split, IN_LARGE_SEGMENT)) { - const segmentsCountKey = this.keys.buildSplitsWithLargeSegmentCountKey(); - // @ts-expect-error - localStorage.setItem(segmentsCountKey, toNumber(localStorage.getItem(segmentsCountKey)) + 1); - } } } catch (e) { this.log.error(LOG_PREFIX + e); @@ -215,14 +203,11 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { return isFiniteNumber(ttCount) && ttCount > 0; } - usesMatcher(matcherType: string) { + usesSegments() { // If cache hasn't been synchronized with the cloud, assume we need them. if (!this.hasSync) return true; - const storedCount = localStorage.getItem(matcherType === IN_SEGMENT ? - this.keys.buildSplitsWithSegmentCountKey() : - this.keys.buildSplitsWithLargeSegmentCountKey() - ); + const storedCount = localStorage.getItem(this.keys.buildSplitsWithSegmentCountKey()); const splitsWithSegmentsCount = storedCount === null ? 0 : toNumber(storedCount); if (isFiniteNumber(splitsWithSegmentsCount)) { diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts index bf857888..732ca8b7 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts @@ -4,7 +4,6 @@ import { splitWithUserTT, splitWithAccountTT, splitWithAccountTTAndUsesSegments, import { ISplit } from '../../../dtos/types'; import { _Set } from '../../../utils/lang/sets'; import { fullSettings } from '../../../utils/settingsValidation/__tests__/settings.mocks'; -import { IN_SEGMENT } from '../../../utils/constants'; test('SPLIT CACHE / LocalStorage', () => { @@ -142,26 +141,26 @@ test('SPLIT CACHE / LocalStorage / killLocally', () => { }); -test('SPLIT CACHE / LocalStorage / usesMatcher', () => { +test('SPLIT CACHE / LocalStorage / usesSegments', () => { const cache = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS('SPLITIO', 'user')); - expect(cache.usesMatcher(IN_SEGMENT)).toBe(true); // true initially, until data is synchronized + expect(cache.usesSegments()).toBe(true); // true initially, until data is synchronized cache.setChangeNumber(1); // to indicate that data has been synced. 
cache.addSplits([['split1', splitWithUserTT], ['split2', splitWithAccountTT],]); - expect(cache.usesMatcher(IN_SEGMENT)).toBe(false); // 0 splits using segments + expect(cache.usesSegments()).toBe(false); // 0 splits using segments cache.addSplit('split3', splitWithAccountTTAndUsesSegments); - expect(cache.usesMatcher(IN_SEGMENT)).toBe(true); // 1 split using segments + expect(cache.usesSegments()).toBe(true); // 1 split using segments cache.addSplit('split4', splitWithAccountTTAndUsesSegments); - expect(cache.usesMatcher(IN_SEGMENT)).toBe(true); // 2 splits using segments + expect(cache.usesSegments()).toBe(true); // 2 splits using segments cache.removeSplit('split3'); - expect(cache.usesMatcher(IN_SEGMENT)).toBe(true); // 1 split using segments + expect(cache.usesSegments()).toBe(true); // 1 split using segments cache.removeSplit('split4'); - expect(cache.usesMatcher(IN_SEGMENT)).toBe(false); // 0 splits using segments + expect(cache.usesSegments()).toBe(false); // 0 splits using segments }); test('SPLIT CACHE / LocalStorage / flag set cache tests', () => { diff --git a/src/storages/inMemory/SplitsCacheInMemory.ts b/src/storages/inMemory/SplitsCacheInMemory.ts index b8f8c06b..9294cc43 100644 --- a/src/storages/inMemory/SplitsCacheInMemory.ts +++ b/src/storages/inMemory/SplitsCacheInMemory.ts @@ -1,8 +1,7 @@ import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; -import { AbstractSplitsCacheSync, usesMatcher } from '../AbstractSplitsCacheSync'; +import { AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; import { isFiniteNumber } from '../../utils/lang'; import { ISet, _Set } from '../../utils/lang/sets'; -import { IN_LARGE_SEGMENT, IN_SEGMENT } from '../../utils/constants'; /** * Default ISplitsCacheSync implementation that stores split definitions in memory. @@ -15,7 +14,6 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { private ttCache: Record = {}; private changeNumber: number = -1; private segmentsCount: number = 0; - private largeSegmentsCount: number = 0; private flagSetsCache: Record> = {}; constructor(splitFiltersValidation?: ISplitFiltersValidation) { @@ -28,7 +26,6 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.ttCache = {}; this.changeNumber = -1; this.segmentsCount = 0; - this.largeSegmentsCount = 0; } addSplit(name: string, split: ISplit): boolean { @@ -41,9 +38,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.removeFromFlagSets(previousSplit.name, previousSplit.sets); - // Substract from segments count for the previous version of this Split. - if (usesMatcher(previousSplit, IN_SEGMENT)) this.segmentsCount--; - if (usesMatcher(previousSplit, IN_LARGE_SEGMENT)) this.largeSegmentsCount--; + // Subtract from segments count for the previous version of this Split + if (usesSegments(previousSplit)) this.segmentsCount--; } if (split) { @@ -55,8 +51,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.addToFlagSets(split); // Add to segments count for the new version of the Split - if (usesMatcher(split, IN_SEGMENT)) this.segmentsCount++; - if (usesMatcher(split, IN_LARGE_SEGMENT)) this.largeSegmentsCount++; + if (usesSegments(split)) this.segmentsCount++; return true; } else { @@ -76,8 +71,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.removeFromFlagSets(split.name, split.sets); // Update the segments count. 
- if (usesMatcher(split, IN_SEGMENT)) this.segmentsCount--; - if (usesMatcher(split, IN_LARGE_SEGMENT)) this.largeSegmentsCount--; + if (usesSegments(split)) this.segmentsCount--; return true; } else { @@ -106,8 +100,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { return isFiniteNumber(this.ttCache[trafficType]) && this.ttCache[trafficType] > 0; } - usesMatcher(matcherType: string): boolean { - return this.getChangeNumber() === -1 || (matcherType === IN_SEGMENT ? this.segmentsCount > 0 : this.largeSegmentsCount > 0); + usesSegments(): boolean { + return this.getChangeNumber() === -1 || this.segmentsCount > 0; } getNamesByFlagSets(flagSets: string[]): ISet[] { diff --git a/src/storages/types.ts b/src/storages/types.ts index ed3e6b35..b0ead7f9 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -204,8 +204,8 @@ export interface ISplitsCacheBase { getSplitNames(): MaybeThenable, // should never reject or throw an exception. Instead return true by default, asssuming the TT might exist. trafficTypeExists(trafficType: string): MaybeThenable, - // only for Client-Side. Returns true if the storage is not synchronized yet (getChangeNumber() === 1) or contains a FF using the given matcher - usesMatcher(matcherType: string): MaybeThenable, + // only for Client-Side. Returns true if the storage is not synchronized yet (getChangeNumber() === -1) or contains a FF using segments or large segments + usesSegments(): MaybeThenable, clear(): MaybeThenable, // should never reject or throw an exception. Instead return false by default, to avoid emitting SDK_READY_FROM_CACHE. checkCache(): MaybeThenable, @@ -223,7 +223,7 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { getAll(): ISplit[], getSplitNames(): string[], trafficTypeExists(trafficType: string): boolean, - usesMatcher(matcherType: string): boolean, + usesSegments(): boolean, clear(): void, checkCache(): boolean, killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean, @@ -240,7 +240,7 @@ export interface ISplitsCacheAsync extends ISplitsCacheBase { getAll(): Promise, getSplitNames(): Promise, trafficTypeExists(trafficType: string): Promise, - usesMatcher(matcherType: string): Promise, + usesSegments(): Promise, clear(): Promise, checkCache(): Promise, killLocally(name: string, defaultTreatment: string, changeNumber: number): Promise, @@ -270,7 +270,7 @@ export interface ISegmentsCacheSync extends ISegmentsCacheBase { getKeysCount(): number // only used for telemetry setChangeNumber(name: string, changeNumber: number): boolean getChangeNumber(name: string): number - resetSegments(names: string[]): boolean // only for Sync Client-Side + resetSegments(names: string[], changeNumber?: number): boolean // only for Sync Client-Side clear(): void } @@ -478,6 +478,7 @@ export interface IStorageSync extends IStorageBase< ITelemetryCacheSync, IUniqueKeysCacheSync > { + // Defined in client-side largeSegments?: ISegmentsCacheSync, } diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index bb0bd72b..a83568ef 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -12,7 +12,7 @@ jest.mock('../submitters/submitterManager', () => { // Mocked storageManager const storageManagerMock = { splits: { - usesMatcher: () => false + usesSegments: () => false } }; @@ -29,9 +29,7 @@ const pollingManagerMock = { start: jest.fn(), stop: jest.fn(), isRunning: jest.fn(), - add: jest.fn(() => ({ - 
msSyncTask: { isRunning: () => true } - })), + add: jest.fn(() => { return { isRunning: () => true }; }), get: jest.fn() }; diff --git a/src/sync/polling/fetchers/mySegmentsFetcher.ts b/src/sync/polling/fetchers/mySegmentsFetcher.ts index 9b342c75..0d09cc36 100644 --- a/src/sync/polling/fetchers/mySegmentsFetcher.ts +++ b/src/sync/polling/fetchers/mySegmentsFetcher.ts @@ -1,5 +1,5 @@ import { IFetchMySegments, IResponse } from '../../../services/types'; -import { IMySegmentsResponse, IMyLargeSegmentsResponse } from '../../../dtos/types'; +import { IMySegmentsResponse } from '../../../dtos/types'; import { IMySegmentsFetcher } from './types'; /** @@ -13,19 +13,14 @@ export function mySegmentsFetcherFactory(fetchMySegments: IFetchMySegments): IMy noCache?: boolean, // Optional decorator for `fetchMySegments` promise, such as timeout or time tracker decorator?: (promise: Promise) => Promise - ): Promise { + ): Promise { let mySegmentsPromise = fetchMySegments(userMatchingKey, noCache); if (decorator) mySegmentsPromise = decorator(mySegmentsPromise); // Extract segment names return mySegmentsPromise - .then(resp => resp.json()) - .then((json: IMySegmentsResponse | IMyLargeSegmentsResponse) => { - return (json as IMySegmentsResponse).mySegments ? - (json as IMySegmentsResponse).mySegments.map((segment) => segment.name) : - (json as IMyLargeSegmentsResponse).myLargeSegments; - }); + .then(resp => resp.json()); }; } diff --git a/src/sync/polling/fetchers/types.ts b/src/sync/polling/fetchers/types.ts index 19ccd7bb..0e06efee 100644 --- a/src/sync/polling/fetchers/types.ts +++ b/src/sync/polling/fetchers/types.ts @@ -1,4 +1,4 @@ -import { ISplitChangesResponse, ISegmentChangesResponse } from '../../../dtos/types'; +import { ISplitChangesResponse, ISegmentChangesResponse, IMySegmentsResponse } from '../../../dtos/types'; import { IResponse } from '../../../services/types'; export type ISplitChangesFetcher = ( @@ -20,4 +20,4 @@ export type IMySegmentsFetcher = ( userMatchingKey: string, noCache?: boolean, decorator?: (promise: Promise) => Promise -) => Promise +) => Promise diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index a2d6ef7f..2bfbd95e 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -6,9 +6,8 @@ import { mySegmentsSyncTaskFactory } from './syncTasks/mySegmentsSyncTask'; import { splitsSyncTaskFactory } from './syncTasks/splitsSyncTask'; import { getMatching } from '../../utils/key'; import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; -import { POLLING_START, POLLING_STOP } from '../../logger/constants'; +import { POLLING_SMART_PAUSING, POLLING_START, POLLING_STOP } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; -import { IN_LARGE_SEGMENT, IN_SEGMENT } from '../../utils/constants'; /** * Expose start / stop mechanism for polling data from services. @@ -23,92 +22,62 @@ export function pollingManagerCSFactory( const splitsSyncTask = splitsSyncTaskFactory(splitApi.fetchSplitChanges, storage, readiness, settings, true); - // Map of matching keys to their corresponding MySegmentsSyncTask for segments and large segments. - const mySegmentsSyncTasks: Record = {}; + // Map of matching keys to their corresponding MySegmentsSyncTask. 
+ const mySegmentsSyncTasks: Record = {}; const matchingKey = getMatching(settings.core.key); - const { msSyncTask, mlsSyncTask } = add(matchingKey, readiness, storage); + const mySegmentsSyncTask = add(matchingKey, readiness, storage); function startMySegmentsSyncTasks() { - const splitsHaveSegments = storage.splits.usesMatcher(IN_SEGMENT); - const splitsHaveLargeSegments = storage.splits.usesMatcher(IN_LARGE_SEGMENT); - - forOwn(mySegmentsSyncTasks, ({ msSyncTask, mlsSyncTask }) => { - if (splitsHaveSegments) msSyncTask.start(); - else msSyncTask.stop(); // smart pausing - - if (mlsSyncTask) { - if (splitsHaveLargeSegments) mlsSyncTask.start(); - else mlsSyncTask.stop(); // smart pausing - } + forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { + mySegmentsSyncTask.start(); }); } function stopMySegmentsSyncTasks() { - forOwn(mySegmentsSyncTasks, ({ msSyncTask, mlsSyncTask }) => { - msSyncTask.stop(); - mlsSyncTask && mlsSyncTask.stop(); + forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { + if (mySegmentsSyncTask.isRunning()) mySegmentsSyncTask.stop(); }); } + // smart pausing readiness.splits.on(SDK_SPLITS_ARRIVED, () => { - if (splitsSyncTask.isRunning()) startMySegmentsSyncTasks(); + if (!splitsSyncTask.isRunning()) return; // noop if not doing polling + const splitsHaveSegments = storage.splits.usesSegments(); + if (splitsHaveSegments !== mySegmentsSyncTask.isRunning()) { + log.info(POLLING_SMART_PAUSING, [splitsHaveSegments ? 'ON' : 'OFF']); + if (splitsHaveSegments) { + startMySegmentsSyncTasks(); + } else { + stopMySegmentsSyncTasks(); + } + } }); function add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync) { - const msSyncTask = mySegmentsSyncTaskFactory( - splitApi.fetchMySegments, - storage.segments, - () => { if (storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); }, - settings, - matchingKey, - settings.scheduler.segmentsRefreshRate, - 'mySegmentsUpdater' - ); - - let mlsSyncTask; - if (settings.sync.largeSegmentsEnabled) { - mlsSyncTask = mySegmentsSyncTaskFactory( - splitApi.fetchMyLargeSegments, - storage.largeSegments!, - () => { if (readiness.largeSegments && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED); }, - settings, - matchingKey, - settings.scheduler.largeSegmentsRefreshRate, - 'myLargeSegmentsUpdater' - ); - } + const mySegmentsSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMySegments, storage, readiness, settings, matchingKey); // smart ready function smartReady() { - if (!readiness.isReady()) { - if (readiness.largeSegments && !storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED); - if (!storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); - } + if (!readiness.isReady() && !storage.splits.usesSegments()) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); } + if (!storage.splits.usesSegments()) setTimeout(smartReady, 0); + else readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); - if (storage.splits.usesMatcher(IN_SEGMENT) && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); - else setTimeout(smartReady, 0); - - mySegmentsSyncTasks[matchingKey] = { msSyncTask: msSyncTask, mlsSyncTask: mlsSyncTask }; - - return { - msSyncTask, - mlsSyncTask - }; + mySegmentsSyncTasks[matchingKey] = mySegmentsSyncTask; + return mySegmentsSyncTask; } return { splitsSyncTask, - segmentsSyncTask: msSyncTask, - largeSegmentsSyncTask: 
mlsSyncTask, + segmentsSyncTask: mySegmentsSyncTask, // Start periodic fetching (polling) start() { log.info(POLLING_START); splitsSyncTask.start(); - startMySegmentsSyncTasks(); + if (storage.splits.usesSegments()) startMySegmentsSyncTasks(); }, // Stop periodic fetching (polling) @@ -125,9 +94,8 @@ export function pollingManagerCSFactory( // fetch splits and segments syncAll() { const promises = [splitsSyncTask.execute()]; - forOwn(mySegmentsSyncTasks, function ({ msSyncTask, mlsSyncTask }) { - promises.push(msSyncTask.execute()); - mlsSyncTask && promises.push(mlsSyncTask.execute()); + forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { + promises.push(mySegmentsSyncTask.execute()); }); return Promise.all(promises); }, diff --git a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts index 4bf16e46..4e43fe44 100644 --- a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +++ b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts @@ -1,4 +1,5 @@ -import { ISegmentsCacheSync } from '../../../storages/types'; +import { IStorageSync } from '../../../storages/types'; +import { IReadinessManager } from '../../../readiness/types'; import { syncTaskFactory } from '../../syncTask'; import { IMySegmentsSyncTask } from '../types'; import { IFetchMySegments } from '../../../services/types'; @@ -11,25 +12,23 @@ import { mySegmentsUpdaterFactory } from '../updaters/mySegmentsUpdater'; */ export function mySegmentsSyncTaskFactory( fetchMySegments: IFetchMySegments, - mySegmentsCache: ISegmentsCacheSync, - notifyUpdate: () => void, + storage: IStorageSync, + readiness: IReadinessManager, settings: ISettings, - matchingKey: string, - segmentsRefreshRate: number, - NAME: string + matchingKey: string ): IMySegmentsSyncTask { return syncTaskFactory( settings.log, mySegmentsUpdaterFactory( settings.log, mySegmentsFetcherFactory(fetchMySegments), - mySegmentsCache, - notifyUpdate, + storage, + readiness.segments, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, matchingKey ), - segmentsRefreshRate, - NAME, + settings.scheduler.segmentsRefreshRate, + 'mySegmentsUpdater', ); } diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index efc7d5d9..ce17ae5c 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -1,4 +1,4 @@ -import { ISplit } from '../../dtos/types'; +import { IMySegmentsResponse, ISplit } from '../../dtos/types'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; import { ITask, ISyncTask } from '../types'; @@ -7,9 +7,11 @@ export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: nu export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { } -export type MySegmentsData = string[] | { +export type MySegmentsData = IMySegmentsResponse | { + /* segment type */ + isLS?: boolean /* segment name */ - name: string, + name: string /* action: `true` for add, and `false` for delete */ add: boolean }[] @@ -20,14 +22,13 @@ export interface IPollingManager extends ITask { syncAll(): Promise splitsSyncTask: ISplitsSyncTask segmentsSyncTask: ISyncTask - largeSegmentsSyncTask?: ISyncTask } /** * PollingManager for client-side with support for multiple clients */ export interface IPollingManagerCS extends IPollingManager { - add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync): { msSyncTask: 
IMySegmentsSyncTask, mlsSyncTask?: IMySegmentsSyncTask } + add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync): IMySegmentsSyncTask remove(matchingKey: string): void; - get(matchingKey: string): { msSyncTask: IMySegmentsSyncTask, mlsSyncTask?: IMySegmentsSyncTask } | undefined + get(matchingKey: string): IMySegmentsSyncTask | undefined } diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index c988ef2c..fade62b3 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -1,10 +1,11 @@ import { IMySegmentsFetcher } from '../fetchers/types'; -import { ISegmentsCacheSync } from '../../../storages/types'; +import { IStorageSync } from '../../../storages/types'; +import { ISegmentsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; +import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; -import { isObject } from '../../../utils/lang'; type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => Promise @@ -17,13 +18,14 @@ type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => P export function mySegmentsUpdaterFactory( log: ILogger, mySegmentsFetcher: IMySegmentsFetcher, - mySegmentsCache: ISegmentsCacheSync, - notifyUpdate: () => void, + storage: IStorageSync, + segmentsEventEmitter: ISegmentsEventEmitter, requestTimeoutBeforeReady: number, retriesOnFailureBeforeReady: number, matchingKey: string ): IMySegmentsUpdater { + const { splits, segments, largeSegments } = storage; let readyOnAlreadyExistentState = true; let startingUp = true; @@ -37,24 +39,27 @@ export function mySegmentsUpdaterFactory( function updateSegments(segmentsData: MySegmentsData) { let shouldNotifyUpdate; - if (isObject(segmentsData[0])) { + if (Array.isArray(segmentsData)) { // Add/Delete the segment names - (segmentsData as { name: string, add: boolean }[]).forEach(({ name, add }) => { - if (mySegmentsCache.isInSegment(name) !== add) { + (segmentsData as { isLS?: boolean, name: string, add: boolean }[]).forEach(({ isLS, name, add }) => { + const cache = isLS ? 
largeSegments : segments; + if (cache!.isInSegment(name) !== add) { shouldNotifyUpdate = true; - if (add) mySegmentsCache.addToSegment(name); - else mySegmentsCache.removeFromSegment(name); + if (add) cache!.addToSegment(name); + else cache!.removeFromSegment(name); } }); } else { // Reset the list of segment names - shouldNotifyUpdate = mySegmentsCache.resetSegments(segmentsData as string[]); + const mySegmentsUpdated = segments.resetSegments(segmentsData.mySegments.map((segment) => segment.name)); + const myLargeSegmentsUpdated = largeSegments!.resetSegments(segmentsData.myLargeSegments || []/*, segmentsData.till*/); + shouldNotifyUpdate = mySegmentsUpdated || myLargeSegmentsUpdated; } // Notify update if required - if (shouldNotifyUpdate || readyOnAlreadyExistentState) { + if (splits.usesSegments() && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { readyOnAlreadyExistentState = false; - notifyUpdate(); + segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED); } } diff --git a/src/sync/streaming/SSEClient/__tests__/index.spec.ts b/src/sync/streaming/SSEClient/__tests__/index.spec.ts index 8187e969..8013ba8d 100644 --- a/src/sync/streaming/SSEClient/__tests__/index.spec.ts +++ b/src/sync/streaming/SSEClient/__tests__/index.spec.ts @@ -135,26 +135,3 @@ test('SSEClient / open method: URL, metadata headers and options', () => { expect(platform.getEventSource.mock.calls).toEqual([[settings]]); expect(platform.getOptions.mock.calls).toEqual([[settings]]); }); - -test('SSEClient / open method: largeSegmentsEnabled true', () => { - const authDataWithMyLargeSegmentsChannel = { - ...authDataSample, - channels: { ...authDataSample.channels, 'NzM2MDI5Mzc0_MzQyODU4NDUyNg==_myLargeSegments': ['subscribe'] }, - }; - - let instance = new SSEClient({ - ...settings, - sync: { largeSegmentsEnabled: false } - }, true, { getEventSource: () => EventSourceMock }); - - instance.open(authDataWithMyLargeSegmentsChannel); - expect(instance.connection.url).toBe(EXPECTED_URL); - - instance = new SSEClient({ - ...settings, - sync: { largeSegmentsEnabled: true } - }, true, { getEventSource: () => EventSourceMock }); - - instance.open(authDataWithMyLargeSegmentsChannel); - expect(instance.connection.url).toBe(EXPECTED_URL.replace('&accessToken', ',NzM2MDI5Mzc0_MzQyODU4NDUyNg%3D%3D_myLargeSegments&accessToken')); -}); diff --git a/src/sync/streaming/SSEClient/index.ts b/src/sync/streaming/SSEClient/index.ts index 97218524..6615bfbf 100644 --- a/src/sync/streaming/SSEClient/index.ts +++ b/src/sync/streaming/SSEClient/index.ts @@ -1,7 +1,7 @@ import { IPlatform } from '../../../sdkFactory/types'; import { IEventSourceConstructor } from '../../../services/types'; import { ISettings } from '../../../types'; -import { endsWith, isString } from '../../../utils/lang'; +import { isString } from '../../../utils/lang'; import { objectAssign } from '../../../utils/lang/objectAssign'; import { IAuthTokenPushEnabled } from '../AuthClient/types'; import { ISSEClient, ISseEventHandler } from './types'; @@ -42,7 +42,6 @@ export class SSEClient implements ISSEClient { useHeaders?: boolean; headers: Record; options?: object; - lse?: boolean; /** * SSEClient constructor. 
@@ -62,7 +61,6 @@ export class SSEClient implements ISSEClient { this.useHeaders = useHeaders; this.headers = buildSSEHeaders(settings); this.options = getOptions && getOptions(settings); - this.lse = settings.sync.largeSegmentsEnabled; } setEventHandler(handler: ISseEventHandler) { @@ -81,10 +79,7 @@ export class SSEClient implements ISSEClient { const channelsQueryParam = Object.keys(authToken.channels).map((channel) => { const params = CONTROL_CHANNEL_REGEX.test(channel) ? '[?occupancy=metrics.publishers]' : ''; return encodeURIComponent(params + channel); - }).filter(channel => { - return this.lse || !endsWith(channel, 'myLargeSegments'); }).join(','); - const url = `${this.streamingUrl}?channels=${channelsQueryParam}&accessToken=${authToken.token}&v=${ABLY_API_VERSION}&heartbeats=true`; // same results using `&heartbeats=false` this.connection = new this.eventSource!( diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index aa8d6482..9a6dbd3b 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -1,13 +1,13 @@ import { IMySegmentsSyncTask, MySegmentsData } from '../../polling/types'; import { Backoff } from '../../../utils/Backoff'; import { IUpdateWorker } from './types'; +import { MY_SEGMENT } from '../../../utils/constants'; import { ITelemetryTracker } from '../../../trackers/types'; -import { UpdatesFromSSEEnum } from '../../submitters/types'; /** * MySegmentsUpdateWorker factory */ -export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]> { +export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]> { let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events let currentChangeNumber = -1; @@ -37,7 +37,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, syncTask.then((result) => { if (!isHandlingEvent) return; // halt if `stop` has been called if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. - if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType); + if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MY_SEGMENT); currentChangeNumber = Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. 
} if (handleNewEvent) { diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index f8363034..cb5f7d61 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -19,7 +19,7 @@ import { ISet, _Set } from '../../utils/lang/sets'; import { hash } from '../../utils/murmur3/murmur3'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; -import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants'; +import { TOKEN_REFRESH, AUTH_REJECTION } from '../../utils/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; export function getDelay(parsedData: Pick, matchingKey: string) { @@ -73,7 +73,7 @@ export function pushManagerFactory( const userKeyHashes: Record = {}; // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers. // Hash64 is used to process MY_SEGMENTS_UPDATE_V2 events and dispatch actions to the corresponding MySegmentsUpdateWorker. - const clients: Record, workerLarge?: ReturnType }> = {}; + const clients: Record }> = {}; // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming. let connectForNewClient = false; @@ -180,10 +180,7 @@ export function pushManagerFactory( // cancel scheduled fetch retries of Splits, Segments, and MySegments Update Workers function stopWorkers() { splitsUpdateWorker.stop(); - if (userKey) forOwn(clients, ({ worker, workerLarge }) => { - worker.stop(); - workerLarge && workerLarge.stop(); - }); + if (userKey) forOwn(clients, ({ worker }) => worker.stop()); else segmentsUpdateWorker!.stop(); } @@ -261,10 +258,10 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ hash64, worker, workerLarge }, matchingKey) => { + forOwn(clients, ({ hash64, worker }, matchingKey) => { if (isInBitmap(bitmap, hash64.hex)) { isLS ? - workerLarge && workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : + worker.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : worker.put(parsedData.changeNumber); } }); @@ -281,11 +278,12 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ hash64, worker, workerLarge }) => { + forOwn(clients, ({ hash64, worker }) => { const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; if (add !== undefined) { isLS ? - workerLarge && workerLarge.put(parsedData.changeNumber, [{ + worker.put(parsedData.changeNumber, [{ + isLS, name: parsedData.largeSegments[0], add }]) : @@ -303,9 +301,10 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ worker, workerLarge }) => { + forOwn(clients, ({ worker }) => { isLS ? - workerLarge && workerLarge.put(parsedData.changeNumber, parsedData.largeSegments.map(largeSegment => ({ + worker.put(parsedData.changeNumber, parsedData.largeSegments.map(largeSegment => ({ + isLS, name: largeSegment, add: false }))) : @@ -318,9 +317,9 @@ export function pushManagerFactory( } // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases - forOwn(clients, ({ worker, workerLarge }, matchingKey) => { + forOwn(clients, ({ worker }, matchingKey) => { isLS ? 
- workerLarge && workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : + worker.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : worker.put(parsedData.changeNumber); }); } @@ -332,7 +331,7 @@ export function pushManagerFactory( if (userKey && clients[userKey]) { // check existence since it can be undefined if client has been destroyed clients[userKey].worker.put( parsedData.changeNumber, - parsedData.includesPayload ? parsedData.segmentList ? parsedData.segmentList : [] : undefined); + parsedData.includesPayload ? { mySegments: parsedData.segmentList ? parsedData.segmentList.map(segment => ({ name: segment })) : [] } : undefined); } }); @@ -360,7 +359,7 @@ export function pushManagerFactory( if (disabled || disconnected === false) return; disconnected = false; - if (userKey) this.add(userKey, pollingManager.segmentsSyncTask, pollingManager.largeSegmentsSyncTask!); // client-side + if (userKey) this.add(userKey, pollingManager.segmentsSyncTask); // client-side else setTimeout(connectPush); // server-side runs in next cycle as in client-side, for consistency with client-side }, @@ -370,15 +369,14 @@ export function pushManagerFactory( }, // [Only for client-side] - add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask, myLargeSegmentsSyncTask?: IMySegmentsSyncTask) { + add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask) { const hash = hashUserKey(userKey); if (!userKeyHashes[hash]) { userKeyHashes[hash] = userKey; clients[userKey] = { hash64: hash64(userKey), - worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, MY_SEGMENT), - workerLarge: myLargeSegmentsSyncTask ? MySegmentsUpdateWorker(myLargeSegmentsSyncTask, telemetryTracker, MY_LARGE_SEGMENT) : undefined + worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker) }; connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key diff --git a/src/sync/submitters/telemetrySubmitter.ts b/src/sync/submitters/telemetrySubmitter.ts index 881cb5ed..a2289e08 100644 --- a/src/sync/submitters/telemetrySubmitter.ts +++ b/src/sync/submitters/telemetrySubmitter.ts @@ -71,19 +71,15 @@ export function telemetryCacheConfigAdapter(telemetry: ITelemetryCacheSync, sett pop(): TelemetryConfigStatsPayload { const { urls, scheduler } = settings; const isClientSide = settings.core.key !== undefined; - const largeSegmentsEnabled = isClientSide && settings.sync.largeSegmentsEnabled; const { flagSetsTotal, flagSetsIgnored } = getTelemetryFlagSetsStats(settings.sync.__splitFiltersValidation); return objectAssign(getTelemetryConfigStats(settings.mode, settings.storage.type), { sE: settings.streamingEnabled, - lsE: largeSegmentsEnabled ? largeSegmentsEnabled : undefined, - wls: largeSegmentsEnabled ? settings.startup.waitForLargeSegments : undefined, rR: { sp: scheduler.featuresRefreshRate / 1000, se: isClientSide ? undefined : scheduler.segmentsRefreshRate / 1000, ms: isClientSide ? scheduler.segmentsRefreshRate / 1000 : undefined, - mls: largeSegmentsEnabled ? 
scheduler.largeSegmentsRefreshRate / 1000 : undefined, im: scheduler.impressionsRefreshRate / 1000, ev: scheduler.eventsPushRate / 1000, te: scheduler.telemetryRefreshRate / 1000, diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index 72e91b9d..5e18dfd9 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -103,7 +103,7 @@ export type DROPPED = 1; export type DEDUPED = 2; export type ImpressionDataType = QUEUED | DROPPED | DEDUPED export type EventDataType = QUEUED | DROPPED; -export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT | MY_LARGE_SEGMENT; +export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT; export type SPLITS = 'sp'; export type IMPRESSIONS = 'im'; @@ -113,8 +113,7 @@ export type TELEMETRY = 'te'; export type TOKEN = 'to'; export type SEGMENT = 'se'; export type MY_SEGMENT = 'ms'; -export type MY_LARGE_SEGMENT = 'mls'; -export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MY_SEGMENT | MY_LARGE_SEGMENT; +export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MY_SEGMENT; export type LastSync = Partial> export type HttpErrors = Partial> @@ -205,7 +204,6 @@ export type RefreshRates = { sp: number, // splits se?: number, // segments ms?: number, // mySegments - mls?: number, // myLargeSegments im: number, // impressions ev: number, // events te: number, // telemetry @@ -231,8 +229,6 @@ export type TelemetryConfigStats = { // 'metrics/config' JSON request body export type TelemetryConfigStatsPayload = TelemetryConfigStats & { sE: boolean, // streamingEnabled - lsE?: boolean, // largeSegmentsEnabled - wls?: boolean, // waitForLargeSegments rR: RefreshRates, // refreshRates uO: UrlOverrides, // urlOverrides iQ: number, // impressionsQueueSize diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index 5a2d9f50..b6407630 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -7,7 +7,7 @@ import { IPollingManager, IPollingManagerCS } from './polling/types'; import { PUSH_SUBSYSTEM_UP, PUSH_SUBSYSTEM_DOWN } from './streaming/constants'; import { SYNC_START_POLLING, SYNC_CONTINUE_POLLING, SYNC_STOP_POLLING } from '../logger/constants'; import { isConsentGranted } from '../consent'; -import { IN_LARGE_SEGMENT, IN_SEGMENT, POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; +import { POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; import { ISdkFactoryContextSync } from '../sdkFactory/types'; /** @@ -141,44 +141,36 @@ export function syncManagerOnlineFactory( shared(matchingKey: string, readinessManager: IReadinessManager, storage: IStorageSync) { if (!pollingManager) return; - const { msSyncTask, mlsSyncTask } = (pollingManager as IPollingManagerCS).add(matchingKey, readinessManager, storage); + const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).add(matchingKey, readinessManager, storage); return { - isRunning: msSyncTask.isRunning, + isRunning: mySegmentsSyncTask.isRunning, start() { if (syncEnabled) { if (pushManager) { if (pollingManager!.isRunning()) { // if doing polling, we must start the periodic fetch of data - if (storage.splits.usesMatcher(IN_SEGMENT)) msSyncTask.start(); - if (mlsSyncTask && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) mlsSyncTask.start(); + if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); } else { // if not polling, we must execute the sync task for the initial fetch // of segments since `syncAll` 
was already executed when starting the main client - msSyncTask.execute(); - mlsSyncTask && mlsSyncTask.execute(); + mySegmentsSyncTask.execute(); } - pushManager.add(matchingKey, msSyncTask, mlsSyncTask); + pushManager.add(matchingKey, mySegmentsSyncTask); } else { - if (storage.splits.usesMatcher(IN_SEGMENT)) msSyncTask.start(); - if (mlsSyncTask && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) mlsSyncTask.start(); + if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); } } else { - if (!readinessManager.isReady()) { - msSyncTask.execute(); - mlsSyncTask && mlsSyncTask.execute(); - } + if (!readinessManager.isReady()) mySegmentsSyncTask.execute(); } }, stop() { // check in case `client.destroy()` has been invoked more than once for the same client - const syncTasks = (pollingManager as IPollingManagerCS).get(matchingKey); - if (syncTasks) { - const { msSyncTask, mlsSyncTask } = syncTasks; + const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).get(matchingKey); + if (mySegmentsSyncTask) { // stop syncing if (pushManager) pushManager.remove(matchingKey); - if (msSyncTask.isRunning()) msSyncTask.stop(); - if (mlsSyncTask && mlsSyncTask.isRunning()) mlsSyncTask.stop(); + if (mySegmentsSyncTask.isRunning()) mySegmentsSyncTask.stop(); (pollingManager as IPollingManagerCS).remove(matchingKey); } diff --git a/src/types.ts b/src/types.ts index f259c1c0..475b1822 100644 --- a/src/types.ts +++ b/src/types.ts @@ -86,7 +86,6 @@ export interface ISettings { metricsRefreshRate?: number, telemetryRefreshRate: number, segmentsRefreshRate: number, - largeSegmentsRefreshRate: number, offlineRefreshRate: number, eventsPushRate: number, eventsQueueSize: number, @@ -96,8 +95,7 @@ export interface ISettings { readyTimeout: number, requestTimeoutBeforeReady: number, retriesOnFailureBeforeReady: number, - eventsFirstPushWindow: number, - waitForLargeSegments: boolean + eventsFirstPushWindow: number }, readonly storage: IStorageSyncFactory | IStorageAsyncFactory, readonly integrations: Array<{ @@ -121,7 +119,6 @@ export interface ISettings { __splitFiltersValidation: ISplitFiltersValidation, localhostMode?: SplitIO.LocalhostFactory, enabled: boolean, - largeSegmentsEnabled: boolean, flagSpecVersion: string }, readonly runtime: { @@ -816,13 +813,6 @@ export namespace SplitIO { * @default 10 */ eventsFirstPushWindow?: number, - /** - * Whether the SDK should wait for large segments to be ready before emitting SDK_READY event. - * It only applies if largeSegmentsEnabled is true. - * @property {number} waitForLargeSegments - * @default true - */ - waitForLargeSegments?: boolean }, /** * SDK scheduler settings. @@ -867,13 +857,6 @@ export namespace SplitIO { * @default 60 */ segmentsRefreshRate?: number, - /** - * The SDK polls Split servers for changes to large segment definitions. This parameter controls this polling period in seconds. - * It only applies if largeSegmentsEnabled is true. - * @property {number} largeSegmentsRefreshRate - * @default 60 - */ - largeSegmentsRefreshRate?: number, /** * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. * @property {number} eventsPushRate @@ -946,14 +929,6 @@ export namespace SplitIO { * @property {Object} urls */ urls?: UrlSettings, - sync?: ISharedSettings['sync'] & { - /** - * Enables synchronization of large segments. - * @property {boolean} largeSegmentsEnabled - * @default false - */ - largeSegmentsEnabled?: boolean - } } /** * Settings interface for SDK instances created on NodeJS. 
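To make the resulting client-side surface concrete, a hypothetical configuration sketch follows. It is not part of the patch: option names and numeric values are taken from the typings and settings defaults shown in these hunks, while `core.authorizationKey` and the key value are placeholders. A single `segmentsRefreshRate` now drives the one mySegments sync task that keeps both the standard and large segment caches up to date.

    const configSketch = {
      core: {
        authorizationKey: '<YOUR_SDK_KEY>', // placeholder credentials
        key: 'user-key'                     // placeholder matching key
      },
      startup: {
        readyTimeout: 10,                   // there is no waitForLargeSegments counterpart
        requestTimeoutBeforeReady: 5,
        retriesOnFailureBeforeReady: 1,
        eventsFirstPushWindow: 10
      },
      scheduler: {
        featuresRefreshRate: 60,
        segmentsRefreshRate: 60             // unified rate; no separate largeSegmentsRefreshRate
      }
    };
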
diff --git a/src/utils/constants/index.ts b/src/utils/constants/index.ts index e11a2d77..e7703a2a 100644 --- a/src/utils/constants/index.ts +++ b/src/utils/constants/index.ts @@ -76,7 +76,6 @@ export const TELEMETRY = 'te'; export const TOKEN = 'to'; export const SEGMENT = 'se'; export const MY_SEGMENT = 'ms'; -export const MY_LARGE_SEGMENT = 'mls'; export const TREATMENT = 't'; export const TREATMENTS = 'ts'; diff --git a/src/utils/settingsValidation/__tests__/index.spec.ts b/src/utils/settingsValidation/__tests__/index.spec.ts index 358b278e..8e0238c4 100644 --- a/src/utils/settingsValidation/__tests__/index.spec.ts +++ b/src/utils/settingsValidation/__tests__/index.spec.ts @@ -14,8 +14,7 @@ const minimalSettingsParams = { requestTimeoutBeforeReady: 5, retriesOnFailureBeforeReady: 1, readyTimeout: 10, - eventsFirstPushWindow: 10, - waitForLargeSegments: false + eventsFirstPushWindow: 10 }, version: 'javascript-test', }, diff --git a/src/utils/settingsValidation/__tests__/settings.mocks.ts b/src/utils/settingsValidation/__tests__/settings.mocks.ts index 4efd8a85..d5f4b2e5 100644 --- a/src/utils/settingsValidation/__tests__/settings.mocks.ts +++ b/src/utils/settingsValidation/__tests__/settings.mocks.ts @@ -52,7 +52,6 @@ export const fullSettings: ISettings = { impressionsRefreshRate: 1, telemetryRefreshRate: 1, segmentsRefreshRate: 1, - largeSegmentsRefreshRate: 1, offlineRefreshRate: 1, eventsPushRate: 1, eventsQueueSize: 1, @@ -63,8 +62,7 @@ export const fullSettings: ISettings = { readyTimeout: 1, requestTimeoutBeforeReady: 1, retriesOnFailureBeforeReady: 1, - eventsFirstPushWindow: 1, - waitForLargeSegments: false + eventsFirstPushWindow: 1 }, features: 'path/to/file', storage: InMemoryStorageCSFactory, @@ -82,8 +80,7 @@ export const fullSettings: ISettings = { groupedFilters: { bySet: [], byName: [], byPrefix: [] }, }, enabled: true, - flagSpecVersion: '1.1', - largeSegmentsEnabled: false + flagSpecVersion: '1.2' }, version: 'jest', runtime: { diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index e4ad9921..c6641c9a 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ -32,8 +32,6 @@ export const base = { featuresRefreshRate: 60, // fetch segments updates each 60 sec segmentsRefreshRate: 60, - // fetch large segments updates each 60 sec - largeSegmentsRefreshRate: 60, // publish telemetry stats each 3600 secs (1 hour) telemetryRefreshRate: 3600, // publish evaluations each 300 sec (default value for OPTIMIZED impressions mode) @@ -87,8 +85,7 @@ export const base = { impressionsMode: OPTIMIZED, localhostMode: undefined, enabled: true, - flagSpecVersion: FLAG_SPEC_VERSION, - largeSegmentsEnabled: false + flagSpecVersion: FLAG_SPEC_VERSION }, // Logger @@ -135,7 +132,6 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV const { scheduler, startup } = withDefaults; scheduler.featuresRefreshRate = fromSecondsToMillis(scheduler.featuresRefreshRate); scheduler.segmentsRefreshRate = fromSecondsToMillis(scheduler.segmentsRefreshRate); - scheduler.largeSegmentsRefreshRate = fromSecondsToMillis(scheduler.largeSegmentsRefreshRate); scheduler.offlineRefreshRate = fromSecondsToMillis(scheduler.offlineRefreshRate); scheduler.eventsPushRate = fromSecondsToMillis(scheduler.eventsPushRate); scheduler.telemetryRefreshRate = fromSecondsToMillis(validateMinValue('telemetryRefreshRate', scheduler.telemetryRefreshRate, 60)); From 56b76985c944675daf93614b9411d9c5ed581b54 Mon Sep 17 00:00:00 
2001 From: Emiliano Sanchez Date: Wed, 21 Aug 2024 01:02:06 +0200 Subject: [PATCH 049/146] Update PushManager to have 2 MySegmentUpdateWorker instances per client, to handle the changeNumber of each notification type --- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 6 ++-- src/sync/streaming/pushManager.ts | 28 +++++++++++-------- src/sync/submitters/types.ts | 3 +- src/utils/constants/index.ts | 1 + 4 files changed, 22 insertions(+), 16 deletions(-) diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index 9a6dbd3b..aa8d6482 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -1,13 +1,13 @@ import { IMySegmentsSyncTask, MySegmentsData } from '../../polling/types'; import { Backoff } from '../../../utils/Backoff'; import { IUpdateWorker } from './types'; -import { MY_SEGMENT } from '../../../utils/constants'; import { ITelemetryTracker } from '../../../trackers/types'; +import { UpdatesFromSSEEnum } from '../../submitters/types'; /** * MySegmentsUpdateWorker factory */ -export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]> { +export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]> { let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events let currentChangeNumber = -1; @@ -37,7 +37,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, syncTask.then((result) => { if (!isHandlingEvent) return; // halt if `stop` has been called if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. - if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MY_SEGMENT); + if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType); currentChangeNumber = Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. } if (handleNewEvent) { diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index cb5f7d61..c2ccd602 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -19,7 +19,7 @@ import { ISet, _Set } from '../../utils/lang/sets'; import { hash } from '../../utils/murmur3/murmur3'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; -import { TOKEN_REFRESH, AUTH_REJECTION } from '../../utils/constants'; +import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; export function getDelay(parsedData: Pick, matchingKey: string) { @@ -73,7 +73,7 @@ export function pushManagerFactory( const userKeyHashes: Record = {}; // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers. // Hash64 is used to process MY_SEGMENTS_UPDATE_V2 events and dispatch actions to the corresponding MySegmentsUpdateWorker. 
- const clients: Record }> = {}; + const clients: Record, workerLarge: ReturnType }> = {}; // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming. let connectForNewClient = false; @@ -180,7 +180,10 @@ export function pushManagerFactory( // cancel scheduled fetch retries of Splits, Segments, and MySegments Update Workers function stopWorkers() { splitsUpdateWorker.stop(); - if (userKey) forOwn(clients, ({ worker }) => worker.stop()); + if (userKey) forOwn(clients, ({ worker, workerLarge }) => { + worker.stop(); + workerLarge.stop(); + }); else segmentsUpdateWorker!.stop(); } @@ -258,10 +261,10 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ hash64, worker }, matchingKey) => { + forOwn(clients, ({ hash64, worker, workerLarge }, matchingKey) => { if (isInBitmap(bitmap, hash64.hex)) { isLS ? - worker.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : + workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : worker.put(parsedData.changeNumber); } }); @@ -278,11 +281,11 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ hash64, worker }) => { + forOwn(clients, ({ hash64, worker, workerLarge }) => { const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; if (add !== undefined) { isLS ? - worker.put(parsedData.changeNumber, [{ + workerLarge.put(parsedData.changeNumber, [{ isLS, name: parsedData.largeSegments[0], add @@ -301,9 +304,9 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ worker }) => { + forOwn(clients, ({ worker, workerLarge }) => { isLS ? - worker.put(parsedData.changeNumber, parsedData.largeSegments.map(largeSegment => ({ + workerLarge.put(parsedData.changeNumber, parsedData.largeSegments.map(largeSegment => ({ isLS, name: largeSegment, add: false @@ -317,9 +320,9 @@ export function pushManagerFactory( } // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases - forOwn(clients, ({ worker }, matchingKey) => { + forOwn(clients, ({ worker, workerLarge }, matchingKey) => { isLS ? 
- worker.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : + workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : worker.put(parsedData.changeNumber); }); } @@ -376,7 +379,8 @@ export function pushManagerFactory( userKeyHashes[hash] = userKey; clients[userKey] = { hash64: hash64(userKey), - worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker) + worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, MY_SEGMENT), + workerLarge: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, MY_LARGE_SEGMENT) }; connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index 5e18dfd9..7dfa0985 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -103,7 +103,7 @@ export type DROPPED = 1; export type DEDUPED = 2; export type ImpressionDataType = QUEUED | DROPPED | DEDUPED export type EventDataType = QUEUED | DROPPED; -export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT; +export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT | MY_LARGE_SEGMENT; export type SPLITS = 'sp'; export type IMPRESSIONS = 'im'; @@ -113,6 +113,7 @@ export type TELEMETRY = 'te'; export type TOKEN = 'to'; export type SEGMENT = 'se'; export type MY_SEGMENT = 'ms'; +export type MY_LARGE_SEGMENT = 'mls'; export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MY_SEGMENT; export type LastSync = Partial> diff --git a/src/utils/constants/index.ts b/src/utils/constants/index.ts index e7703a2a..e11a2d77 100644 --- a/src/utils/constants/index.ts +++ b/src/utils/constants/index.ts @@ -76,6 +76,7 @@ export const TELEMETRY = 'te'; export const TOKEN = 'to'; export const SEGMENT = 'se'; export const MY_SEGMENT = 'ms'; +export const MY_LARGE_SEGMENT = 'mls'; export const TREATMENT = 't'; export const TREATMENTS = 'ts'; From 004b70763b5bbec055e916a7f51ea6d4e35fbf5e Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 23 Aug 2024 23:30:50 +0200 Subject: [PATCH 050/146] Unify MY_SEGMENTS_UPDATE_V3 and MY_LARGE_SEGMENTS_UPDATE notifications --- ..._UPDATE.SEGMENT_REMOVAL.1457552653000.json | 2 +- ...GMENTS_UPDATE.UNBOUNDED.1457552650000.json | 2 +- ...UPDATE.nicolas@split.io.1457552640000.json | 4 -- ..._UPDATE_V3.BOUNDED.GZIP.1457552651000.json | 4 ++ ..._UPDATE_V3.KEYLIST.GZIP.1457552652000.json | 4 ++ ...DATE_V3.SEGMENT_REMOVAL.1457552653000.json | 4 ++ ...NTS_UPDATE_V3.UNBOUNDED.1457552650000.json | 4 ++ ...message.V2.BOUNDED.GZIP.1457552651000.json | 4 -- ...message.V2.KEYLIST.GZIP.1457552652000.json | 4 -- ...sage.V2.SEGMENT_REMOVAL.1457552653000.json | 4 -- .../message.V2.UNBOUNDED.1457552650000.json | 4 -- src/logger/constants.ts | 2 +- src/logger/messages/warn.ts | 2 +- .../SSEHandler/__tests__/index.spec.ts | 41 +++++------ src/sync/streaming/SSEHandler/index.ts | 24 +++---- src/sync/streaming/SSEHandler/types.ts | 32 +++------ src/sync/streaming/constants.ts | 3 +- src/sync/streaming/pushManager.ts | 70 ++++++------------- src/sync/streaming/types.ts | 18 +++-- 19 files changed, 91 insertions(+), 141 deletions(-) delete mode 100644 src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json create mode 100644 src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json create mode 100644 src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json create mode 100644 
src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json create mode 100644 src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json delete mode 100644 src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json delete mode 100644 src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json delete mode 100644 src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json delete mode 100644 src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json diff --git a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json index 69e900dd..78c8661b 100644 --- a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json +++ b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -1,4 +1,4 @@ { "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552653000,\\\"largeSegments\\\":[\\\"employees\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" + "data": "{\"data\":\"{\\\"t\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"employees\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" } \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json index f6fa5982..4780aac1 100644 --- a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json +++ b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json @@ -1,4 +1,4 @@ { "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552650000,\\\"largeSegments\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" + "data": "{\"data\":\"{\\\"t\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" } \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json deleted file mode 100644 index 951d8a31..00000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552640900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_NTcwOTc3MDQx_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552640000,\\\"includesPayload\\\":false}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json new file mode 100644 index 00000000..f99ade55 --- /dev/null +++ b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"t\\\":\\\"MY_SEGMENTS_UPDATE_V3\\\",\\\"cn\\\":1457552651000,\\\"l\\\":[],\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" +} \ No 
newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json new file mode 100644 index 00000000..3b218920 --- /dev/null +++ b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"t\\\":\\\"MY_SEGMENTS_UPDATE_V3\\\",\\\"cn\\\":1457552652000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 00000000..05af28a4 --- /dev/null +++ b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"t\\\":\\\"MY_SEGMENTS_UPDATE_V3\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json new file mode 100644 index 00000000..2f57352c --- /dev/null +++ b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"t\\\":\\\"MY_SEGMENTS_UPDATE_V3\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json deleted file mode 100644 index 97c2a73c..00000000 --- a/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552651000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json deleted file mode 100644 index c44ee3ac..00000000 --- a/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552652000,\\\"segmentName\\\":\\\"splitters\\\",\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json deleted file mode 100644 index aaf1a3f3..00000000 --- a/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": 
"{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552653000,\\\"segmentName\\\":\\\"splitters\\\",\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json deleted file mode 100644 index a7a2e793..00000000 --- a/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552650000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/logger/constants.ts b/src/logger/constants.ts index 36f6a139..9b1582de 100644 --- a/src/logger/constants.ts +++ b/src/logger/constants.ts @@ -79,7 +79,7 @@ export const WARN_SPLITS_FILTER_IGNORED = 219; export const WARN_SPLITS_FILTER_INVALID = 220; export const WARN_SPLITS_FILTER_EMPTY = 221; export const WARN_SDK_KEY = 222; -export const STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2 = 223; +export const STREAMING_PARSING_MY_SEGMENTS_UPDATE = 223; export const STREAMING_PARSING_SPLIT_UPDATE = 224; export const WARN_INVALID_FLAGSET = 225; export const WARN_LOWERCASE_FLAGSET = 226; diff --git a/src/logger/messages/warn.ts b/src/logger/messages/warn.ts index 6bad73e8..4e1aa331 100644 --- a/src/logger/messages/warn.ts +++ b/src/logger/messages/warn.ts @@ -32,7 +32,7 @@ export const codesWarn: [number, string][] = codesError.concat([ [c.WARN_SPLITS_FILTER_EMPTY, c.LOG_PREFIX_SETTINGS + ': feature flag filter configuration must be a non-empty array of filter objects.'], [c.WARN_SDK_KEY, c.LOG_PREFIX_SETTINGS + ': You already have %s. We recommend keeping only one instance of the factory at all times (Singleton pattern) and reusing it throughout your application'], - [c.STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching MySegments due to an error processing %s notification: %s'], + [c.STREAMING_PARSING_MY_SEGMENTS_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching MySegments due to an error processing %s notification: %s'], [c.STREAMING_PARSING_SPLIT_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching SplitChanges due to an error processing SPLIT_UPDATE notification: %s'], [c.WARN_INVALID_FLAGSET, '%s: you passed %s, flag set must adhere to the regular expressions %s. This means a flag set must start with a letter or number, be in lowercase, alphanumeric and have a max length of 50 characters. 
%s was discarded.'], [c.WARN_LOWERCASE_FLAGSET, '%s: flag set %s should be all lowercase - converting string to lowercase.'], diff --git a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts index 75034c4c..264e8e80 100644 --- a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts +++ b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts @@ -1,19 +1,18 @@ // @ts-nocheck import { SSEHandlerFactory } from '..'; -import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, MY_SEGMENTS_UPDATE, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_SEGMENTS_UPDATE_V2, MY_LARGE_SEGMENTS_UPDATE, ControlType } from '../../constants'; +import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_SEGMENTS_UPDATE_V3, MY_LARGE_SEGMENTS_UPDATE, ControlType } from '../../constants'; import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; // update messages import splitUpdateMessage from '../../../../__tests__/mocks/message.SPLIT_UPDATE.1457552620999.json'; import splitKillMessage from '../../../../__tests__/mocks/message.SPLIT_KILL.1457552650000.json'; import segmentUpdateMessage from '../../../../__tests__/mocks/message.SEGMENT_UPDATE.1457552640000.json'; -import mySegmentsUpdateMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json'; -// update messages MY_SEGMENTS_UPDATE_V2 -import unboundedMessage from '../../../../__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json'; -import boundedGzipMessage from '../../../../__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json'; -import keylistGzipMessage from '../../../../__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json'; -import segmentRemovalMessage from '../../../../__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json'; +// update messages MY_SEGMENTS_UPDATE_V3 +import unboundedMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json'; +import boundedGzipMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json'; +import keylistGzipMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json'; +import segmentRemovalMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json'; import { keylists, bitmaps } from '../../__tests__/dataMocks'; // update messages MY_LARGE_SEGMENTS_UPDATE @@ -153,33 +152,29 @@ test('`handlerMessage` for update notifications (NotificationProcessor) and stre sseHandler.handleMessage(segmentUpdateMessage); expect(pushEmitter.emit).toHaveBeenLastCalledWith(SEGMENT_UPDATE, ...expectedParams); // must emit SEGMENT_UPDATE with the message change number and segment name - expectedParams = [{ type: MY_SEGMENTS_UPDATE, changeNumber: 1457552640000, includesPayload: false }, 'NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_NTcwOTc3MDQx_mySegments']; - sseHandler.handleMessage(mySegmentsUpdateMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_SEGMENTS_UPDATE with the message parsed data and channel - - expectedParams = [{ type: 'MY_SEGMENTS_UPDATE_V2', changeNumber: 1457552650000, c: 0, d: '', u: 0, segmentName: '' }]; + expectedParams = [{ t: 'MY_SEGMENTS_UPDATE_V3', cn: 1457552650000, c: 0, d: '', u: 0, l: [] }]; sseHandler.handleMessage(unboundedMessage); - 
expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V3, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V3 with the message parsed data - expectedParams = [{ type: 'MY_SEGMENTS_UPDATE_V2', changeNumber: 1457552651000, c: 1, d: bitmaps[0].bitmapDataCompressed, u: 1, segmentName: '' }]; + expectedParams = [{ t: 'MY_SEGMENTS_UPDATE_V3', cn: 1457552651000, c: 1, d: bitmaps[0].bitmapDataCompressed, u: 1, l: [] }]; sseHandler.handleMessage(boundedGzipMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V3, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V3 with the message parsed data - expectedParams = [{ type: 'MY_SEGMENTS_UPDATE_V2', changeNumber: 1457552652000, c: 1, d: keylists[0].keyListDataCompressed, u: 2, segmentName: 'splitters' }]; + expectedParams = [{ t: 'MY_SEGMENTS_UPDATE_V3', cn: 1457552652000, c: 1, d: keylists[0].keyListDataCompressed, u: 2, l: ['splitters'] }]; sseHandler.handleMessage(keylistGzipMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V3, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V3 with the message parsed data - expectedParams = [{ type: 'MY_SEGMENTS_UPDATE_V2', changeNumber: 1457552653000, c: 0, d: '', u: 3, segmentName: 'splitters' }]; + expectedParams = [{ t: 'MY_SEGMENTS_UPDATE_V3', cn: 1457552653000, c: 0, d: '', u: 3, l: ['splitters'] }]; sseHandler.handleMessage(segmentRemovalMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V3, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V3 with the message parsed data - expectedParams = [{ type: 'MY_LARGE_SEGMENTS_UPDATE', changeNumber: 1457552650000, c: 0, d: '', u: 0, largeSegments: [], i: 300, h: 1, s: 0 }]; + expectedParams = [{ t: 'MY_LARGE_SEGMENTS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [], i: 300, h: 1, s: 0 }]; sseHandler.handleMessage(largeSegmentUnboundedMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_LARGE_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_LARGE_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_LARGE_SEGMENTS_UPDATE with the message parsed data - expectedParams = [{ type: 'MY_LARGE_SEGMENTS_UPDATE', changeNumber: 1457552653000, c: 0, d: '', u: 3, largeSegments: ['employees'] }]; + expectedParams = [{ t: 'MY_LARGE_SEGMENTS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 3, l: ['employees'] }]; sseHandler.handleMessage(largeSegmentRemovalMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_LARGE_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_LARGE_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_LARGE_SEGMENTS_UPDATE with the message parsed data sseHandler.handleMessage(streamingReset); 
expect(pushEmitter.emit).toHaveBeenLastCalledWith(ControlType.STREAMING_RESET); // must emit STREAMING_RESET diff --git a/src/sync/streaming/SSEHandler/index.ts b/src/sync/streaming/SSEHandler/index.ts index 43da3e14..fdc4d321 100644 --- a/src/sync/streaming/SSEHandler/index.ts +++ b/src/sync/streaming/SSEHandler/index.ts @@ -1,9 +1,9 @@ import { errorParser, messageParser } from './NotificationParser'; import { notificationKeeperFactory } from './NotificationKeeper'; -import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_LARGE_SEGMENTS_UPDATE } from '../constants'; +import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, MY_SEGMENTS_UPDATE_V3, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_LARGE_SEGMENTS_UPDATE } from '../constants'; import { IPushEventEmitter } from '../types'; import { ISseEventHandler } from '../SSEClient/types'; -import { INotificationError, INotificationMessage } from './types'; +import { IControlData, INotificationError, INotificationMessage, IOccupancyData } from './types'; import { ILogger } from '../../../logger/types'; import { STREAMING_PARSING_ERROR_FAILS, ERROR_STREAMING_SSE, STREAMING_PARSING_MESSAGE_FAILS, STREAMING_NEW_MESSAGE } from '../../../logger/constants'; import { ABLY_ERROR, NON_REQUESTED, SSE_CONNECTION_ERROR } from '../../../utils/constants'; @@ -74,29 +74,27 @@ export function SSEHandlerFactory(log: ILogger, pushEmitter: IPushEventEmitter, const { parsedData, data, channel, timestamp } = messageWithParsedData; log.debug(STREAMING_NEW_MESSAGE, [data]); - // we only handle update events if streaming is up. - if (!notificationKeeper.isStreamingUp() && [OCCUPANCY, CONTROL].indexOf(parsedData.type) === -1) - return; + // we only handle update events if streaming is up + // @ts-expect-error + const type = parsedData.type || parsedData.t; + if (!notificationKeeper.isStreamingUp() && [OCCUPANCY, CONTROL].indexOf(type) === -1) return; - switch (parsedData.type) { + switch (type) { /* update events */ case SPLIT_UPDATE: case SEGMENT_UPDATE: - case MY_SEGMENTS_UPDATE_V2: + case MY_SEGMENTS_UPDATE_V3: case MY_LARGE_SEGMENTS_UPDATE: case SPLIT_KILL: - pushEmitter.emit(parsedData.type, parsedData); - break; - case MY_SEGMENTS_UPDATE: - pushEmitter.emit(parsedData.type, parsedData, channel); + pushEmitter.emit(type, parsedData); break; /* occupancy & control events, handled by NotificationManagerKeeper */ case OCCUPANCY: - notificationKeeper.handleOccupancyEvent(parsedData.metrics.publishers, channel, timestamp); + notificationKeeper.handleOccupancyEvent((parsedData as IOccupancyData).metrics.publishers, channel, timestamp); break; case CONTROL: - notificationKeeper.handleControlEvent(parsedData.controlType, channel, timestamp); + notificationKeeper.handleControlEvent((parsedData as IControlData).controlType, channel, timestamp); break; default: diff --git a/src/sync/streaming/SSEHandler/types.ts b/src/sync/streaming/SSEHandler/types.ts index 08e7b72a..3db1fb19 100644 --- a/src/sync/streaming/SSEHandler/types.ts +++ b/src/sync/streaming/SSEHandler/types.ts @@ -1,12 +1,5 @@ import { ControlType } from '../constants'; -import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MY_LARGE_SEGMENTS_UPDATE } from '../types'; - -export interface IMySegmentsUpdateData { - type: MY_SEGMENTS_UPDATE, - changeNumber: number, - includesPayload: boolean, - segmentList?: string[] -} +import { 
SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MY_LARGE_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V3 } from '../types'; export enum Compression { None = 0, @@ -26,19 +19,10 @@ export interface KeyList { r?: string[], // decimal hash64 of user keys } -export interface IMySegmentsUpdateV2Data { - type: MY_SEGMENTS_UPDATE_V2, - changeNumber: number, - segmentName: string, - c: Compression, - d: string, - u: UpdateStrategy, -} - -export interface IMyLargeSegmentsUpdateData { - type: MY_LARGE_SEGMENTS_UPDATE, - changeNumber: number, - largeSegments: string[], +interface IMySegmentsUpdateData { + t: T, + cn: number, + l: string[], c: Compression, d: string, u: UpdateStrategy, @@ -47,6 +31,10 @@ export interface IMyLargeSegmentsUpdateData { s?: number, // seed for hash function } +export interface IMySegmentsUpdateV3Data extends IMySegmentsUpdateData { } + +export interface IMyLargeSegmentsUpdateData extends IMySegmentsUpdateData { } + export interface ISegmentUpdateData { type: SEGMENT_UPDATE, changeNumber: number, @@ -80,6 +68,6 @@ export interface IOccupancyData { } } -export type INotificationData = IMySegmentsUpdateData | IMySegmentsUpdateV2Data | IMyLargeSegmentsUpdateData | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData +export type INotificationData = IMySegmentsUpdateV3Data | IMyLargeSegmentsUpdateData | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData export type INotificationMessage = { parsedData: INotificationData, channel: string, timestamp: number, data: string } export type INotificationError = Event & { parsedData?: any, message?: string } diff --git a/src/sync/streaming/constants.ts b/src/sync/streaming/constants.ts index c6a338c9..bbd72706 100644 --- a/src/sync/streaming/constants.ts +++ b/src/sync/streaming/constants.ts @@ -25,8 +25,7 @@ export const PUSH_SUBSYSTEM_UP = 'PUSH_SUBSYSTEM_UP'; export const PUSH_SUBSYSTEM_DOWN = 'PUSH_SUBSYSTEM_DOWN'; // Update-type push notifications, handled by NotificationProcessor -export const MY_SEGMENTS_UPDATE = 'MY_SEGMENTS_UPDATE'; -export const MY_SEGMENTS_UPDATE_V2 = 'MY_SEGMENTS_UPDATE_V2'; +export const MY_SEGMENTS_UPDATE_V3 = 'MY_SEGMENTS_UPDATE_V3'; export const SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export const SPLIT_KILL = 'SPLIT_KILL'; export const SPLIT_UPDATE = 'SPLIT_UPDATE'; diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index c2ccd602..a2fcf7b1 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -11,9 +11,9 @@ import { authenticateFactory, hashUserKey } from './AuthClient'; import { forOwn } from '../../utils/lang'; import { SSEClient } from './SSEClient'; import { getMatching } from '../../utils/key'; -import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType, MY_LARGE_SEGMENTS_UPDATE } from './constants'; -import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; -import { IMyLargeSegmentsUpdateData, IMySegmentsUpdateV2Data, KeyList, UpdateStrategy } from './SSEHandler/types'; +import { MY_SEGMENTS_UPDATE_V3, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, 
SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType, MY_LARGE_SEGMENTS_UPDATE } from './constants'; +import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MY_SEGMENTS_UPDATE, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; +import { IMyLargeSegmentsUpdateData, IMySegmentsUpdateV3Data, KeyList, UpdateStrategy } from './SSEHandler/types'; import { isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; import { ISet, _Set } from '../../utils/lang/sets'; import { hash } from '../../utils/murmur3/murmur3'; @@ -72,7 +72,7 @@ export function pushManagerFactory( // [Only for client-side] map of hashes to user keys, to dispatch MY_SEGMENTS_UPDATE events to the corresponding MySegmentsUpdateWorker const userKeyHashes: Record = {}; // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers. - // Hash64 is used to process MY_SEGMENTS_UPDATE_V2 events and dispatch actions to the corresponding MySegmentsUpdateWorker. + // Hash64 is used to process MY_SEGMENTS_UPDATE events and dispatch actions to the corresponding MySegmentsUpdateWorker. const clients: Record, workerLarge: ReturnType }> = {}; // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming. @@ -248,8 +248,8 @@ export function pushManagerFactory( splitsUpdateWorker.put(parsedData); }); - function handleMySegmentsUpdate(parsedData: IMySegmentsUpdateV2Data | IMyLargeSegmentsUpdateData) { - const isLS = parsedData.type === MY_LARGE_SEGMENTS_UPDATE; + function handleMySegmentsUpdate(parsedData: IMySegmentsUpdateV3Data | IMyLargeSegmentsUpdateData) { + const isLS = parsedData.t === MY_LARGE_SEGMENTS_UPDATE; switch (parsedData.u) { case UpdateStrategy.BoundedFetchRequest: { @@ -257,15 +257,13 @@ export function pushManagerFactory( try { bitmap = parseBitmap(parsedData.d, parsedData.c); } catch (e) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['BoundedFetchRequest', e]); + log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['BoundedFetchRequest', e]); break; } forOwn(clients, ({ hash64, worker, workerLarge }, matchingKey) => { if (isInBitmap(bitmap, hash64.hex)) { - isLS ? - workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : - worker.put(parsedData.changeNumber); + (isLS ? workerLarge : worker).put(parsedData.cn, undefined, getDelay(parsedData, matchingKey)); } }); return; @@ -277,68 +275,46 @@ export function pushManagerFactory( added = new _Set(keyList.a); removed = new _Set(keyList.r); } catch (e) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['KeyList', e]); + log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['KeyList', e]); break; } forOwn(clients, ({ hash64, worker, workerLarge }) => { const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; if (add !== undefined) { - isLS ? - workerLarge.put(parsedData.changeNumber, [{ - isLS, - name: parsedData.largeSegments[0], - add - }]) : - worker.put(parsedData.changeNumber, [{ - name: parsedData.segmentName, - add - }]); + (isLS ? 
workerLarge : worker).put(parsedData.cn, [{ + isLS, + name: parsedData.l[0], + add + }]); } }); return; } case UpdateStrategy.SegmentRemoval: - if ((isLS && parsedData.largeSegments.length === 0) || (!isLS && !parsedData.segmentName)) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['SegmentRemoval', 'No segment name was provided']); + if (!parsedData.l || !parsedData.l.length) { + log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['SegmentRemoval', 'No segment name was provided']); break; } forOwn(clients, ({ worker, workerLarge }) => { - isLS ? - workerLarge.put(parsedData.changeNumber, parsedData.largeSegments.map(largeSegment => ({ - isLS, - name: largeSegment, - add: false - }))) : - worker.put(parsedData.changeNumber, [{ - name: parsedData.segmentName, - add: false - }]); + (isLS ? workerLarge : worker).put(parsedData.cn, parsedData.l.map(largeSegment => ({ + isLS, + name: largeSegment, + add: false + }))); }); return; } // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases forOwn(clients, ({ worker, workerLarge }, matchingKey) => { - isLS ? - workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) : - worker.put(parsedData.changeNumber); + (isLS ? workerLarge : worker).put(parsedData.cn, undefined, getDelay(parsedData, matchingKey)); }); } if (userKey) { - pushEmitter.on(MY_SEGMENTS_UPDATE, function handleMySegmentsUpdate(parsedData, channel) { - const userKeyHash = channel.split('_')[2]; - const userKey = userKeyHashes[userKeyHash]; - if (userKey && clients[userKey]) { // check existence since it can be undefined if client has been destroyed - clients[userKey].worker.put( - parsedData.changeNumber, - parsedData.includesPayload ? { mySegments: parsedData.segmentList ? parsedData.segmentList.map(segment => ({ name: segment })) : [] } : undefined); - } - }); - - pushEmitter.on(MY_SEGMENTS_UPDATE_V2, handleMySegmentsUpdate); + pushEmitter.on(MY_SEGMENTS_UPDATE_V3, handleMySegmentsUpdate); pushEmitter.on(MY_LARGE_SEGMENTS_UPDATE, handleMySegmentsUpdate); } else { pushEmitter.on(SEGMENT_UPDATE, segmentsUpdateWorker!.put); diff --git a/src/sync/streaming/types.ts b/src/sync/streaming/types.ts index 607ab67d..db58e045 100644 --- a/src/sync/streaming/types.ts +++ b/src/sync/streaming/types.ts @@ -1,4 +1,4 @@ -import { IMySegmentsUpdateData, IMySegmentsUpdateV2Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData, IMyLargeSegmentsUpdateData } from './SSEHandler/types'; +import { IMySegmentsUpdateV3Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData, IMyLargeSegmentsUpdateData, INotificationData } from './SSEHandler/types'; import { ITask } from '../types'; import { IMySegmentsSyncTask } from '../polling/types'; import { IEventEmitter } from '../../types'; @@ -11,8 +11,7 @@ export type PUSH_NONRETRYABLE_ERROR = 'PUSH_NONRETRYABLE_ERROR' export type PUSH_RETRYABLE_ERROR = 'PUSH_RETRYABLE_ERROR' // Update-type push notifications, handled by NotificationProcessor -export type MY_SEGMENTS_UPDATE = 'MY_SEGMENTS_UPDATE'; -export type MY_SEGMENTS_UPDATE_V2 = 'MY_SEGMENTS_UPDATE_V2'; +export type MY_SEGMENTS_UPDATE_V3 = 'MY_SEGMENTS_UPDATE_V3'; export type SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export type SPLIT_KILL = 'SPLIT_KILL'; export type SPLIT_UPDATE = 'SPLIT_UPDATE'; @@ -22,24 +21,23 @@ export type MY_LARGE_SEGMENTS_UPDATE = 'MY_LARGE_SEGMENTS_UPDATE'; export type CONTROL = 'CONTROL'; export type OCCUPANCY = 'OCCUPANCY'; -export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | 
MY_SEGMENTS_UPDATE | MY_SEGMENTS_UPDATE_V2 | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | MY_LARGE_SEGMENTS_UPDATE | ControlType.STREAMING_RESET +export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MY_SEGMENTS_UPDATE_V3 | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | MY_LARGE_SEGMENTS_UPDATE | ControlType.STREAMING_RESET type IParsedData = - T extends MY_SEGMENTS_UPDATE ? IMySegmentsUpdateData : - T extends MY_SEGMENTS_UPDATE_V2 ? IMySegmentsUpdateV2Data : + T extends MY_SEGMENTS_UPDATE_V3 ? IMySegmentsUpdateV3Data : T extends MY_LARGE_SEGMENTS_UPDATE ? IMyLargeSegmentsUpdateData : T extends SEGMENT_UPDATE ? ISegmentUpdateData : T extends SPLIT_UPDATE ? ISplitUpdateData : - T extends SPLIT_KILL ? ISplitKillData : undefined; + T extends SPLIT_KILL ? ISplitKillData : INotificationData; /** * EventEmitter used as Feedback Loop between the SyncManager and PushManager, * where the latter pushes messages and the former consumes it */ export interface IPushEventEmitter extends IEventEmitter { - once(event: T, listener: (parsedData: IParsedData, channel: T extends MY_SEGMENTS_UPDATE ? string : undefined) => void): this; - on(event: T, listener: (parsedData: IParsedData, channel: T extends MY_SEGMENTS_UPDATE ? string : undefined) => void): this; - emit(event: T, parsedData?: IParsedData, channel?: T extends MY_SEGMENTS_UPDATE ? string : undefined): boolean; + once(event: T, listener: (parsedData: IParsedData) => void): this; + on(event: T, listener: (parsedData: IParsedData) => void): this; + emit(event: T, parsedData?: IParsedData): boolean; } /** From fff7c4557ff7178800e4958cffed44926657665f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 23 Aug 2024 23:36:06 +0200 Subject: [PATCH 051/146] New DTO for /memberships endpoint --- src/dtos/types.ts | 20 ++++++++++++------- src/readiness/readinessManager.ts | 2 +- src/services/__tests__/splitApi.spec.ts | 2 +- src/services/splitApi.ts | 6 +++--- .../polling/updaters/mySegmentsUpdater.ts | 5 ++--- .../__tests__/index.spec.ts | 14 ++++++------- 6 files changed, 27 insertions(+), 22 deletions(-) diff --git a/src/dtos/types.ts b/src/dtos/types.ts index 7a71311d..cf5a974f 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -224,14 +224,20 @@ export interface ISegmentChangesResponse { till: number } -/** Interface of the parsed JSON response of `/mySegments/{userKey}` */ +/** Interface of the parsed JSON response of `/memberships/{userKey}` */ export interface IMySegmentsResponse { - mySegments: { - id?: string, - name: string - }[], - myLargeSegments?: string[], - till?: number + ms?: { + cn?: number, + k?: Array<{ + n: string + }> + }, + ls?: { + cn?: number, + k?: Array<{ + n: string + }> + } } /** Metadata internal type for storages */ diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index 1257c3a6..3437eee6 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -10,7 +10,7 @@ function splitsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISpli }); // `isSplitKill` condition avoids an edge-case of wrongly emitting SDK_READY if: - // - `/mySegments` fetch and SPLIT_KILL occurs before `/splitChanges` fetch, and + // - `/memberships` fetch and SPLIT_KILL occurs before `/splitChanges` fetch, and // - storage has cached splits (for which case `splitsStorage.killLocally` can return true) splitsEventEmitter.on(SDK_SPLITS_ARRIVED, (isSplitKill: boolean) => { if (!isSplitKill) 
splitsEventEmitter.splitsArrived = true; }); splitsEventEmitter.once(SDK_SPLITS_CACHE_LOADED, () => { splitsEventEmitter.splitsCacheLoaded = true; }); diff --git a/src/services/__tests__/splitApi.spec.ts b/src/services/__tests__/splitApi.spec.ts index 07d6dec4..535ef3b9 100644 --- a/src/services/__tests__/splitApi.spec.ts +++ b/src/services/__tests__/splitApi.spec.ts @@ -33,7 +33,7 @@ describe('splitApi', () => { splitApi.fetchMySegments('userKey'); [url, { headers }] = fetchMock.mock.calls[1]; assertHeaders(settings, headers); - expect(url).toBe('sdk/mySegments/userKey'); + expect(url).toBe('sdk/memberships/userKey'); splitApi.fetchSegmentChanges(-1, 'segmentName', false, 90); [url, { headers }] = fetchMock.mock.calls[2]; diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index a5526081..80a913e4 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -70,11 +70,11 @@ export function splitApiFactory( fetchMySegments(userMatchingKey: string, noCache?: boolean) { /** * URI encoding of user keys in order to: - * - avoid 400 responses (due to URI malformed). E.g.: '/api/mySegments/%' - * - avoid 404 responses. E.g.: '/api/mySegments/foo/bar' + * - avoid 400 responses (due to URI malformed). E.g.: '/api/memberships/%' + * - avoid 404 responses. E.g.: '/api/memberships/foo/bar' * - match user keys with special characters. E.g.: 'foo%bar', 'foo/bar' */ - const url = `${urls.sdk}/mySegments/${encodeURIComponent(userMatchingKey)}`; + const url = `${urls.sdk}/memberships/${encodeURIComponent(userMatchingKey)}`; return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MY_SEGMENT)); }, diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index fade62b3..9fb6719e 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -51,9 +51,8 @@ export function mySegmentsUpdaterFactory( }); } else { // Reset the list of segment names - const mySegmentsUpdated = segments.resetSegments(segmentsData.mySegments.map((segment) => segment.name)); - const myLargeSegmentsUpdated = largeSegments!.resetSegments(segmentsData.myLargeSegments || []/*, segmentsData.till*/); - shouldNotifyUpdate = mySegmentsUpdated || myLargeSegmentsUpdated; + shouldNotifyUpdate = segments.resetSegments((segmentsData.ms?.k || []).map((segment) => segment.n), segmentsData.ms?.cn); + shouldNotifyUpdate = largeSegments!.resetSegments((segmentsData.ls?.k || []).map((segment) => segment.n), segmentsData.ls?.cn) || shouldNotifyUpdate; } // Notify update if required diff --git a/src/utils/settingsValidation/__tests__/index.spec.ts b/src/utils/settingsValidation/__tests__/index.spec.ts index 8e0238c4..1deffc98 100644 --- a/src/utils/settingsValidation/__tests__/index.spec.ts +++ b/src/utils/settingsValidation/__tests__/index.spec.ts @@ -309,13 +309,13 @@ test('SETTINGS / urls should be correctly assigned', () => { const baseEventsUrl = 'https://events.split.io/api'; [ - '/mySegments/nico', - '/mySegments/events@split', - '/mySegments/metrics@split', - '/mySegments/testImpressions@split', - '/mySegments/testImpressions', - '/mySegments/events', - '/mySegments/metrics', + '/memberships/nico', + '/memberships/events@split', + '/memberships/metrics@split', + '/memberships/testImpressions@split', + '/memberships/testImpressions', + '/memberships/events', + '/memberships/metrics', '/splitChanges?since=-1', '/splitChanges?since=100', '/segmentChanges/segment1?since=100', From 
21a862aaca6810c00c78d3ae16b7160e1e06a07a Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 27 Aug 2024 16:41:12 +0300 Subject: [PATCH 052/146] Add getChangeNumber method to mySegments storages --- src/storages/AbstractSegmentsCacheSync.ts | 8 ++---- src/storages/KeyBuilderCS.ts | 9 ++++++ .../inLocalStorage/MySegmentsCacheInLocal.ts | 28 +++++++++++++++++-- .../inMemory/MySegmentsCacheInMemory.ts | 8 +++++- 4 files changed, 44 insertions(+), 9 deletions(-) diff --git a/src/storages/AbstractSegmentsCacheSync.ts b/src/storages/AbstractSegmentsCacheSync.ts index a3780d48..7af8d95f 100644 --- a/src/storages/AbstractSegmentsCacheSync.ts +++ b/src/storages/AbstractSegmentsCacheSync.ts @@ -54,15 +54,11 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { */ setChangeNumber(name: string, changeNumber: number): boolean { return true; } - /** - * For server-side synchronizer: get the change number of `name` segment. - * For client-side synchronizer: the method is not used. - */ - getChangeNumber(name: string): number { return -1; } + abstract getChangeNumber(name: string): number /** * For server-side synchronizer: the method is not used. * For client-side synchronizer: reset the cache with the given list of segments. */ - resetSegments(names: string[]): boolean { return true; } + resetSegments(names: string[], changeNumber?: number): boolean { return true; } } diff --git a/src/storages/KeyBuilderCS.ts b/src/storages/KeyBuilderCS.ts index 01602249..5a170eb8 100644 --- a/src/storages/KeyBuilderCS.ts +++ b/src/storages/KeyBuilderCS.ts @@ -5,6 +5,7 @@ export interface MySegmentsKeyBuilder { buildSegmentNameKey(segmentName: string): string; extractSegmentName(builtSegmentKeyName: string): string | undefined; extractOldSegmentKey(builtSegmentKeyName: string): string | undefined; + buildTillKey(): string; } export class KeyBuilderCS extends KeyBuilder implements MySegmentsKeyBuilder { @@ -47,6 +48,10 @@ export class KeyBuilderCS extends KeyBuilder implements MySegmentsKeyBuilder { isSplitsCacheKey(key: string) { return this.regexSplitsCacheKey.test(key); } + + buildTillKey() { + return `${this.prefix}.${this.matchingKey}.segment.till`; + } } export function myLargeSegmentsKeyBuilder(prefix: string, matchingKey: string): MySegmentsKeyBuilder { @@ -63,6 +68,10 @@ export function myLargeSegmentsKeyBuilder(prefix: string, matchingKey: string): extractOldSegmentKey() { return undefined; + }, + + buildTillKey() { + return `${prefix}.${matchingKey}.largeSegment.till`; } }; } diff --git a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts index 3b5085be..165812e8 100644 --- a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts @@ -1,4 +1,5 @@ import { ILogger } from '../../logger/types'; +import { isNaNNumber } from '../../utils/lang'; import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync'; import type { MySegmentsKeyBuilder } from '../KeyBuilderCS'; import { LOG_PREFIX, DEFINED } from './constants'; @@ -58,10 +59,20 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { /** * Reset (update) the cached list of segments with the given list, removing and adding segments if necessary. 
* - * @param {string[]} segmentNames list of segment names + * @param {string[]} names list of segment names * @returns boolean indicating if the cache was updated (i.e., given list was different from the cached one) */ - resetSegments(names: string[]): boolean { + resetSegments(names: string[], changeNumber?: number): boolean { + try { + if (changeNumber) { + localStorage.setItem(this.keys.buildTillKey(), changeNumber + ''); + } else { + localStorage.removeItem(this.keys.buildTillKey()); + } + } catch (e) { + this.log.error(e); + } + let isDiff = false; let index; @@ -133,4 +144,17 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { return 1; } + getChangeNumber() { + const n = -1; + let value: string | number | null = localStorage.getItem(this.keys.buildTillKey()); + + if (value !== null) { + value = parseInt(value, 10); + + return isNaNNumber(value) ? n : value; + } + + return n; + } + } diff --git a/src/storages/inMemory/MySegmentsCacheInMemory.ts b/src/storages/inMemory/MySegmentsCacheInMemory.ts index 08a767a1..f2b8ad93 100644 --- a/src/storages/inMemory/MySegmentsCacheInMemory.ts +++ b/src/storages/inMemory/MySegmentsCacheInMemory.ts @@ -7,6 +7,7 @@ import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync'; export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { private segmentCache: Record = {}; + private cn?: number; clear() { this.segmentCache = {}; @@ -35,7 +36,8 @@ export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { * @param {string[]} names list of segment names * @returns boolean indicating if the cache was updated (i.e., given list was different from the cached one) */ - resetSegments(names: string[]): boolean { + resetSegments(names: string[], changeNumber?: number): boolean { + this.cn = changeNumber; let isDiff = false; let index; @@ -72,6 +74,10 @@ export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { return isDiff; } + getChangeNumber() { + return this.cn || -1; + } + getRegisteredSegments() { return Object.keys(this.segmentCache); } From c5cd2811e1796b9c7d15c97e90792ac5ef0a63e2 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 27 Aug 2024 20:08:19 +0300 Subject: [PATCH 053/146] Unit tests --- src/storages/KeyBuilderCS.ts | 4 ++-- .../inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts | 6 ++++-- .../inMemory/__tests__/MySegmentsCacheInMemory.spec.ts | 6 ++++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/src/storages/KeyBuilderCS.ts b/src/storages/KeyBuilderCS.ts index 5a170eb8..0fe9c9bf 100644 --- a/src/storages/KeyBuilderCS.ts +++ b/src/storages/KeyBuilderCS.ts @@ -50,7 +50,7 @@ export class KeyBuilderCS extends KeyBuilder implements MySegmentsKeyBuilder { } buildTillKey() { - return `${this.prefix}.${this.matchingKey}.segment.till`; + return `${this.prefix}.${this.matchingKey}.segments.till`; } } @@ -71,7 +71,7 @@ export function myLargeSegmentsKeyBuilder(prefix: string, matchingKey: string): }, buildTillKey() { - return `${prefix}.${matchingKey}.largeSegment.till`; + return `${prefix}.${matchingKey}.largeSegments.till`; } }; } diff --git a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts index c581e716..3b062c66 100644 --- a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts @@ -11,8 +11,10 @@ test('SEGMENT CACHE / in LocalStorage', () => { caches.forEach(cache 
=> { cache.clear(); - cache.addToSegment('mocked-segment'); - cache.addToSegment('mocked-segment-2'); + expect(cache.resetSegments(['mocked-segment', 'mocked-segment-2'], 123)).toBe(true); + expect(cache.getChangeNumber()).toBe(123); + expect(cache.resetSegments(['mocked-segment', 'mocked-segment-2'])).toBe(false); + expect(cache.getChangeNumber()).toBe(-1); expect(cache.isInSegment('mocked-segment')).toBe(true); expect(cache.getRegisteredSegments()).toEqual(['mocked-segment', 'mocked-segment-2']); diff --git a/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts index 7236e950..62fb95f7 100644 --- a/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts @@ -3,8 +3,10 @@ import { MySegmentsCacheInMemory } from '../MySegmentsCacheInMemory'; test('MY SEGMENTS CACHE / in memory', () => { const cache = new MySegmentsCacheInMemory(); - cache.addToSegment('mocked-segment'); - cache.addToSegment('mocked-segment-2'); + expect(cache.resetSegments(['mocked-segment', 'mocked-segment-2'], 123)).toBe(true); + expect(cache.getChangeNumber()).toBe(123); + expect(cache.resetSegments(['mocked-segment', 'mocked-segment-2'])).toBe(false); + expect(cache.getChangeNumber()).toBe(-1); expect(cache.isInSegment('mocked-segment')).toBe(true); expect(cache.getRegisteredSegments()).toEqual(['mocked-segment', 'mocked-segment-2']); From 2f02cf9534c635206113eef516e6a2b5f171d55c Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 28 Aug 2024 18:38:01 +0300 Subject: [PATCH 054/146] Polishing --- CHANGES.txt | 4 +++ src/dtos/types.ts | 2 +- src/logger/messages/warn.ts | 2 +- src/services/__tests__/splitApi.spec.ts | 2 +- src/services/splitApi.ts | 2 +- src/services/types.ts | 4 +-- .../polling/fetchers/mySegmentsFetcher.ts | 16 ++++------ .../polling/fetchers/segmentChangesFetcher.ts | 2 +- src/sync/polling/fetchers/types.ts | 4 +-- src/sync/polling/pollingManagerCS.ts | 2 +- .../polling/syncTasks/mySegmentsSyncTask.ts | 6 ++-- src/sync/polling/types.ts | 4 +-- .../polling/updaters/mySegmentsUpdater.ts | 2 +- src/sync/streaming/SSEHandler/types.ts | 6 ++-- .../streaming/__tests__/parseUtils.spec.ts | 9 +++++- .../streaming/__tests__/pushManager.spec.ts | 9 +----- src/sync/streaming/parseUtils.ts | 14 +++++++- src/sync/streaming/pushManager.ts | 32 ++++++++----------- src/sync/streaming/types.ts | 2 +- 19 files changed, 67 insertions(+), 57 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 18746546..b8342740 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,7 @@ +2.0.0 (September XX, 2024) + - BREAKING CHANGES: + - Removed `/mySegments` endpoint from SplitAPI module, as it is replaced by `/memberships` endpoint. + 1.16.1 (July 10, 2024) - Updated some transitive dependencies for vulnerability fixes. 
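To make the `/memberships` DTO and getChangeNumber commits above easier to follow, here is a minimal, self-contained TypeScript sketch of how a memberships payload maps onto the segment caches. It is illustrative only and not part of any patch: the payload values, the local MembershipsLike alias, and the variable names are assumptions; only the ms/ls objects with their cn change numbers and k: [{ n }] name lists come from the IMembershipsResponse DTO and the resetSegments calls shown in the hunks above.

// Sketch only (assumed names/values): mirrors IMembershipsResponse and the
// resetSegments(names, changeNumber) usage from mySegmentsUpdater.ts above.
interface MembershipsLike {
  ms?: { cn?: number, k?: Array<{ n: string }> },
  ls?: { cn?: number, k?: Array<{ n: string }> }
}

// Hypothetical payload returned by GET /memberships/{userKey}
const payload: MembershipsLike = {
  ms: { cn: 1457552650000, k: [{ n: 'employees' }, { n: 'splitters' }] },
  ls: { cn: 1457552651000, k: [{ n: 'beta_testers' }] }
};

// Same extraction the updater performs before resetting each cache:
const segmentNames = (payload.ms?.k || []).map(segment => segment.n);      // ['employees', 'splitters']
const largeSegmentNames = (payload.ls?.k || []).map(segment => segment.n); // ['beta_testers']

// segments.resetSegments(segmentNames, payload.ms?.cn);
// largeSegments.resetSegments(largeSegmentNames, payload.ls?.cn);

Threading the optional cn through resetSegments is what lets the new getChangeNumber methods report -1 until a change number has actually been received.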
diff --git a/src/dtos/types.ts b/src/dtos/types.ts index cf5a974f..82a28689 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -225,7 +225,7 @@ export interface ISegmentChangesResponse { } /** Interface of the parsed JSON response of `/memberships/{userKey}` */ -export interface IMySegmentsResponse { +export interface IMembershipsResponse { ms?: { cn?: number, k?: Array<{ diff --git a/src/logger/messages/warn.ts b/src/logger/messages/warn.ts index 4e1aa331..3b74d550 100644 --- a/src/logger/messages/warn.ts +++ b/src/logger/messages/warn.ts @@ -32,7 +32,7 @@ export const codesWarn: [number, string][] = codesError.concat([ [c.WARN_SPLITS_FILTER_EMPTY, c.LOG_PREFIX_SETTINGS + ': feature flag filter configuration must be a non-empty array of filter objects.'], [c.WARN_SDK_KEY, c.LOG_PREFIX_SETTINGS + ': You already have %s. We recommend keeping only one instance of the factory at all times (Singleton pattern) and reusing it throughout your application'], - [c.STREAMING_PARSING_MY_SEGMENTS_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching MySegments due to an error processing %s notification: %s'], + [c.STREAMING_PARSING_MY_SEGMENTS_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching Memberships due to an error processing %s notification: %s'], [c.STREAMING_PARSING_SPLIT_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching SplitChanges due to an error processing SPLIT_UPDATE notification: %s'], [c.WARN_INVALID_FLAGSET, '%s: you passed %s, flag set must adhere to the regular expressions %s. This means a flag set must start with a letter or number, be in lowercase, alphanumeric and have a max length of 50 characters. %s was discarded.'], [c.WARN_LOWERCASE_FLAGSET, '%s: flag set %s should be all lowercase - converting string to lowercase.'], diff --git a/src/services/__tests__/splitApi.spec.ts b/src/services/__tests__/splitApi.spec.ts index 535ef3b9..c0b7eb21 100644 --- a/src/services/__tests__/splitApi.spec.ts +++ b/src/services/__tests__/splitApi.spec.ts @@ -30,7 +30,7 @@ describe('splitApi', () => { assertHeaders(settings, headers); expect(url).toBe('auth/v2/auth?s=1.1&users=key1&users=key2'); - splitApi.fetchMySegments('userKey'); + splitApi.fetchMemberships('userKey'); [url, { headers }] = fetchMock.mock.calls[1]; assertHeaders(settings, headers); expect(url).toBe('sdk/memberships/userKey'); diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index 80a913e4..5fd97e25 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -67,7 +67,7 @@ export function splitApiFactory( return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(SEGMENT)); }, - fetchMySegments(userMatchingKey: string, noCache?: boolean) { + fetchMemberships(userMatchingKey: string, noCache?: boolean) { /** * URI encoding of user keys in order to: * - avoid 400 responses (due to URI malformed). 
E.g.: '/api/memberships/%' diff --git a/src/services/types.ts b/src/services/types.ts index 116ccec5..96b918f3 100644 --- a/src/services/types.ts +++ b/src/services/types.ts @@ -39,7 +39,7 @@ export type IFetchSplitChanges = (since: number, noCache?: boolean, till?: numbe export type IFetchSegmentChanges = (since: number, segmentName: string, noCache?: boolean, till?: number) => Promise -export type IFetchMySegments = (userMatchingKey: string, noCache?: boolean) => Promise +export type IFetchMemberships = (userMatchingKey: string, noCache?: boolean) => Promise export type IPostEventsBulk = (body: string, headers?: Record) => Promise @@ -61,7 +61,7 @@ export interface ISplitApi { fetchAuth: IFetchAuth fetchSplitChanges: IFetchSplitChanges fetchSegmentChanges: IFetchSegmentChanges - fetchMySegments: IFetchMySegments + fetchMemberships: IFetchMemberships postEventsBulk: IPostEventsBulk postUniqueKeysBulkCs: IPostUniqueKeysBulkCs postUniqueKeysBulkSs: IPostUniqueKeysBulkSs diff --git a/src/sync/polling/fetchers/mySegmentsFetcher.ts b/src/sync/polling/fetchers/mySegmentsFetcher.ts index 0d09cc36..c91bd3c3 100644 --- a/src/sync/polling/fetchers/mySegmentsFetcher.ts +++ b/src/sync/polling/fetchers/mySegmentsFetcher.ts @@ -1,26 +1,24 @@ -import { IFetchMySegments, IResponse } from '../../../services/types'; -import { IMySegmentsResponse } from '../../../dtos/types'; +import { IFetchMemberships, IResponse } from '../../../services/types'; +import { IMembershipsResponse } from '../../../dtos/types'; import { IMySegmentsFetcher } from './types'; /** * Factory of MySegments fetcher. * MySegments fetcher is a wrapper around `mySegments` API service that parses the response and handle errors. */ -export function mySegmentsFetcherFactory(fetchMySegments: IFetchMySegments): IMySegmentsFetcher { +export function mySegmentsFetcherFactory(fetchMemberships: IFetchMemberships): IMySegmentsFetcher { return function mySegmentsFetcher( userMatchingKey: string, noCache?: boolean, - // Optional decorator for `fetchMySegments` promise, such as timeout or time tracker + // Optional decorator for `fetchMemberships` promise, such as timeout or time tracker decorator?: (promise: Promise) => Promise - ): Promise { + ): Promise { - let mySegmentsPromise = fetchMySegments(userMatchingKey, noCache); + let mySegmentsPromise = fetchMemberships(userMatchingKey, noCache); if (decorator) mySegmentsPromise = decorator(mySegmentsPromise); - // Extract segment names - return mySegmentsPromise - .then(resp => resp.json()); + return mySegmentsPromise.then(resp => resp.json()); }; } diff --git a/src/sync/polling/fetchers/segmentChangesFetcher.ts b/src/sync/polling/fetchers/segmentChangesFetcher.ts index 01a42b38..2eb1cdbf 100644 --- a/src/sync/polling/fetchers/segmentChangesFetcher.ts +++ b/src/sync/polling/fetchers/segmentChangesFetcher.ts @@ -28,7 +28,7 @@ export function segmentChangesFetcherFactory(fetchSegmentChanges: IFetchSegmentC segmentName: string, noCache?: boolean, till?: number, - // Optional decorator for `fetchMySegments` promise, such as timeout or time tracker + // Optional decorator for `fetchSegmentChanges` promise, such as timeout or time tracker decorator?: (promise: Promise) => Promise ): Promise { diff --git a/src/sync/polling/fetchers/types.ts b/src/sync/polling/fetchers/types.ts index 0e06efee..bf14d0ba 100644 --- a/src/sync/polling/fetchers/types.ts +++ b/src/sync/polling/fetchers/types.ts @@ -1,4 +1,4 @@ -import { ISplitChangesResponse, ISegmentChangesResponse, IMySegmentsResponse } from 
'../../../dtos/types'; +import { ISplitChangesResponse, ISegmentChangesResponse, IMembershipsResponse } from '../../../dtos/types'; import { IResponse } from '../../../services/types'; export type ISplitChangesFetcher = ( @@ -20,4 +20,4 @@ export type IMySegmentsFetcher = ( userMatchingKey: string, noCache?: boolean, decorator?: (promise: Promise) => Promise -) => Promise +) => Promise diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 2bfbd95e..4ce0882a 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -55,7 +55,7 @@ export function pollingManagerCSFactory( }); function add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync) { - const mySegmentsSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMySegments, storage, readiness, settings, matchingKey); + const mySegmentsSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMemberships, storage, readiness, settings, matchingKey); // smart ready function smartReady() { diff --git a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts index 4e43fe44..fe006c68 100644 --- a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +++ b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts @@ -2,7 +2,7 @@ import { IStorageSync } from '../../../storages/types'; import { IReadinessManager } from '../../../readiness/types'; import { syncTaskFactory } from '../../syncTask'; import { IMySegmentsSyncTask } from '../types'; -import { IFetchMySegments } from '../../../services/types'; +import { IFetchMemberships } from '../../../services/types'; import { mySegmentsFetcherFactory } from '../fetchers/mySegmentsFetcher'; import { ISettings } from '../../../types'; import { mySegmentsUpdaterFactory } from '../updaters/mySegmentsUpdater'; @@ -11,7 +11,7 @@ import { mySegmentsUpdaterFactory } from '../updaters/mySegmentsUpdater'; * Creates a sync task that periodically executes a `mySegmentsUpdater` task */ export function mySegmentsSyncTaskFactory( - fetchMySegments: IFetchMySegments, + fetchMemberships: IFetchMemberships, storage: IStorageSync, readiness: IReadinessManager, settings: ISettings, @@ -21,7 +21,7 @@ export function mySegmentsSyncTaskFactory( settings.log, mySegmentsUpdaterFactory( settings.log, - mySegmentsFetcherFactory(fetchMySegments), + mySegmentsFetcherFactory(fetchMemberships), storage, readiness.segments, settings.startup.requestTimeoutBeforeReady, diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index ce17ae5c..65bcc45b 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -1,4 +1,4 @@ -import { IMySegmentsResponse, ISplit } from '../../dtos/types'; +import { IMembershipsResponse, ISplit } from '../../dtos/types'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; import { ITask, ISyncTask } from '../types'; @@ -7,7 +7,7 @@ export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: nu export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { } -export type MySegmentsData = IMySegmentsResponse | { +export type MySegmentsData = IMembershipsResponse | { /* segment type */ isLS?: boolean /* segment name */ diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 9fb6719e..6c30a18e 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ 
b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -41,7 +41,7 @@ export function mySegmentsUpdaterFactory( let shouldNotifyUpdate; if (Array.isArray(segmentsData)) { // Add/Delete the segment names - (segmentsData as { isLS?: boolean, name: string, add: boolean }[]).forEach(({ isLS, name, add }) => { + segmentsData.forEach(({ isLS, name, add }) => { const cache = isLS ? largeSegments : segments; if (cache!.isInSegment(name) !== add) { shouldNotifyUpdate = true; diff --git a/src/sync/streaming/SSEHandler/types.ts b/src/sync/streaming/SSEHandler/types.ts index 3db1fb19..0f53f8d0 100644 --- a/src/sync/streaming/SSEHandler/types.ts +++ b/src/sync/streaming/SSEHandler/types.ts @@ -22,9 +22,9 @@ export interface KeyList { interface IMySegmentsUpdateData { t: T, cn: number, - l: string[], - c: Compression, - d: string, + n?: string[], + c?: Compression, + d?: string, u: UpdateStrategy, i?: number, // time interval in millis h?: number, // hash function diff --git a/src/sync/streaming/__tests__/parseUtils.spec.ts b/src/sync/streaming/__tests__/parseUtils.spec.ts index a66487c1..ee1402bb 100644 --- a/src/sync/streaming/__tests__/parseUtils.spec.ts +++ b/src/sync/streaming/__tests__/parseUtils.spec.ts @@ -1,7 +1,7 @@ import { hash64 } from '../../../utils/murmur3/murmur3_64'; import { keylists, bitmaps, splitNotifications } from './dataMocks'; -import { parseKeyList, parseBitmap, isInBitmap, parseFFUpdatePayload } from '../parseUtils'; +import { parseKeyList, parseBitmap, isInBitmap, parseFFUpdatePayload, getDelay } from '../parseUtils'; import { _Set } from '../../../utils/lang/sets'; test('parseKeyList', () => { @@ -60,3 +60,10 @@ test('split notification - parseKeyList', () => { }); }); + +test('getDelay', () => { + expect(getDelay({ i: 300, h: 1, s: 0 }, 'nicolas@split.io')).toBe(241); + expect(getDelay({ i: 60000, h: 1, s: 1 }, 'emi@split.io')).toBe(14389); + expect(getDelay({ i: 60000, h: 1, s: 0 }, 'emi@split.io')).toBe(24593); + expect(getDelay({}, 'emi@split.io')).toBe(24593); +}); diff --git a/src/sync/streaming/__tests__/pushManager.spec.ts b/src/sync/streaming/__tests__/pushManager.spec.ts index 83ffb106..7a08bc9b 100644 --- a/src/sync/streaming/__tests__/pushManager.spec.ts +++ b/src/sync/streaming/__tests__/pushManager.spec.ts @@ -3,7 +3,7 @@ import { fullSettings, fullSettingsServerSide } from '../../../utils/settingsVal import { syncTaskFactory } from '../../__tests__/syncTask.mock'; // Test target -import { pushManagerFactory, getDelay } from '../pushManager'; +import { pushManagerFactory } from '../pushManager'; import { IPushManager } from '../types'; const paramsMock = { @@ -192,10 +192,3 @@ describe('pushManager in server-side', () => { }); }); - -test('getDelay', () => { - expect(getDelay({ i: 300, h: 1, s: 0 }, 'nicolas@split.io')).toBe(241); - expect(getDelay({ i: 60000, h: 1, s: 1 }, 'emi@split.io')).toBe(14389); - expect(getDelay({ i: 60000, h: 1, s: 0 }, 'emi@split.io')).toBe(24593); - expect(getDelay({}, 'emi@split.io')).toBe(24593); -}); diff --git a/src/sync/streaming/parseUtils.ts b/src/sync/streaming/parseUtils.ts index b34cfa2c..d7342baa 100644 --- a/src/sync/streaming/parseUtils.ts +++ b/src/sync/streaming/parseUtils.ts @@ -1,6 +1,7 @@ import { algorithms } from '../../utils/decompress'; import { decodeFromBase64 } from '../../utils/base64'; -import { Compression, KeyList } from './SSEHandler/types'; +import { hash } from '../../utils/murmur3/murmur3'; +import { Compression, IMyLargeSegmentsUpdateData, KeyList } from './SSEHandler/types'; import { ISplit } from 
'../../dtos/types'; const GZIP = 1; @@ -87,3 +88,14 @@ export function parseFFUpdatePayload(compression: Compression, data: string): IS parseKeyList(data, compression, false) : JSON.parse(decodeFromBase64(data)); } + +const DEFAULT_MAX_INTERVAL = 60000; + +export function getDelay(parsedData: Pick, matchingKey: string) { + if (parsedData.h === 0) return 0; + + const interval = parsedData.i || DEFAULT_MAX_INTERVAL; + const seed = parsedData.s || 0; + + return hash(matchingKey, seed) % interval; +} diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index a2fcf7b1..2643731f 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -14,23 +14,13 @@ import { getMatching } from '../../utils/key'; import { MY_SEGMENTS_UPDATE_V3, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType, MY_LARGE_SEGMENTS_UPDATE } from './constants'; import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MY_SEGMENTS_UPDATE, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; import { IMyLargeSegmentsUpdateData, IMySegmentsUpdateV3Data, KeyList, UpdateStrategy } from './SSEHandler/types'; -import { isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; +import { getDelay, isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; import { ISet, _Set } from '../../utils/lang/sets'; -import { hash } from '../../utils/murmur3/murmur3'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; -export function getDelay(parsedData: Pick, matchingKey: string) { - if (parsedData.h === 0) return 0; - - const interval = parsedData.i || 60000; - const seed = parsedData.s || 0; - - return hash(matchingKey, seed) % interval; -} - /** * PushManager factory: * - for server-side if key is not provided in settings. @@ -255,7 +245,7 @@ export function pushManagerFactory( case UpdateStrategy.BoundedFetchRequest: { let bitmap: Uint8Array; try { - bitmap = parseBitmap(parsedData.d, parsedData.c); + bitmap = parseBitmap(parsedData.d!, parsedData.c!); } catch (e) { log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['BoundedFetchRequest', e]); break; @@ -271,7 +261,7 @@ export function pushManagerFactory( case UpdateStrategy.KeyList: { let keyList: KeyList, added: ISet, removed: ISet; try { - keyList = parseKeyList(parsedData.d, parsedData.c); + keyList = parseKeyList(parsedData.d!, parsedData.c!); added = new _Set(keyList.a); removed = new _Set(keyList.r); } catch (e) { @@ -279,29 +269,35 @@ export function pushManagerFactory( break; } + if (!parsedData.n || !parsedData.n.length) { + log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['KeyList', 'No segment name was provided']); + break; + } + forOwn(clients, ({ hash64, worker, workerLarge }) => { const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; if (add !== undefined) { (isLS ? 
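For orientation, a minimal usage sketch of the getDelay helper moved into parseUtils above. The caller shape is assumed (it is not part of this patch); the i/h/s field names, the import path, and the 241 ms expectation come from the implementation and spec in this diff.

import { getDelay } from './parseUtils';

// Assumed caller: stagger the memberships fetch triggered by an unbounded
// segments-update notification so that clients do not hit the backend at once.
function scheduleMembershipsFetch(parsedData: { i?: number, h?: number, s?: number }, matchingKey: string, fetchMemberships: () => void) {
  // e.g. getDelay({ i: 300, h: 1, s: 0 }, 'nicolas@split.io') === 241, per parseUtils.spec.ts above
  setTimeout(fetchMemberships, getDelay(parsedData, matchingKey));
}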
workerLarge : worker).put(parsedData.cn, [{ isLS, - name: parsedData.l[0], - add + name: parsedData.n![0], + add, }]); } }); return; } case UpdateStrategy.SegmentRemoval: - if (!parsedData.l || parsedData.l.length) { + if (!parsedData.n || !parsedData.n.length) { log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['SegmentRemoval', 'No segment name was provided']); break; } forOwn(clients, ({ worker, workerLarge }) => { - (isLS ? workerLarge : worker).put(parsedData.cn, parsedData.l.map(largeSegment => ({ + (isLS ? workerLarge : worker).put(parsedData.cn, parsedData.n!.map(largeSegment => ({ isLS, name: largeSegment, - add: false + add: false, + cn: parsedData.cn }))); }); return; diff --git a/src/sync/streaming/types.ts b/src/sync/streaming/types.ts index db58e045..a719ecbb 100644 --- a/src/sync/streaming/types.ts +++ b/src/sync/streaming/types.ts @@ -45,6 +45,6 @@ export interface IPushEventEmitter extends IEventEmitter { */ export interface IPushManager extends ITask, IPushEventEmitter { // Methods used in client-side, to support multiple clients - add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask, myLargeSegmentsSyncTask?: IMySegmentsSyncTask): void, + add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask): void, remove(userKey: string): void } From a4e97a91d4a1f7320e10c0043a7e996ff005bb4f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 29 Aug 2024 22:24:54 +0100 Subject: [PATCH 055/146] rc --- package-lock.json | 74 ++++++++++++----------------------------------- package.json | 2 +- 2 files changed, 19 insertions(+), 57 deletions(-) diff --git a/package-lock.json b/package-lock.json index 8019d143..2635def7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.9", + "version": "1.16.1-rc.10", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.9", + "version": "1.16.1-rc.10", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" @@ -5933,18 +5933,6 @@ "node": ">=8" } }, - "node_modules/jest-util/node_modules/picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", - "dev": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, "node_modules/jest-util/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -6456,30 +6444,18 @@ } }, "node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" + "braces": "^3.0.3", + "picomatch": "^2.3.1" }, "engines": { "node": ">=8.6" } }, - "node_modules/micromatch/node_modules/picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": 
"sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", - "dev": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -6851,9 +6827,9 @@ "dev": true }, "node_modules/picomatch": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", - "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true, "engines": { "node": ">=8.6" @@ -12429,12 +12405,6 @@ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", - "dev": true - }, "supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -12831,21 +12801,13 @@ "dev": true }, "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "dependencies": { - "picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", - "dev": true - } + "braces": "^3.0.3", + "picomatch": "^2.3.1" } }, "mime-db": { @@ -13129,9 +13091,9 @@ "dev": true }, "picomatch": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", - "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, "pirates": { diff --git a/package.json b/package.json index f999f4e6..ea569a0e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.9", + "version": "1.16.1-rc.10", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From f6bac4d6c17034c6fc4b4016702b55b54961b3fa Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 30 Aug 2024 20:03:53 +0100 Subject: [PATCH 056/146] Reuse code for mySegment storages --- src/storages/AbstractSegmentsCacheSync.ts | 41 ++++++++-- src/storages/AbstractSplitsCacheSync.ts | 2 +- .../inLocalStorage/MySegmentsCacheInLocal.ts | 82 +++---------------- .../__tests__/MySegmentsCacheInLocal.spec.ts | 
2 + .../inMemory/MySegmentsCacheInMemory.ts | 49 +---------- .../__tests__/MySegmentsCacheInMemory.spec.ts | 5 +- src/storages/types.ts | 4 +- 7 files changed, 58 insertions(+), 127 deletions(-) diff --git a/src/storages/AbstractSegmentsCacheSync.ts b/src/storages/AbstractSegmentsCacheSync.ts index 7af8d95f..4b92c57c 100644 --- a/src/storages/AbstractSegmentsCacheSync.ts +++ b/src/storages/AbstractSegmentsCacheSync.ts @@ -28,7 +28,9 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { /** * clear the cache. */ - abstract clear(): void + clear() { + this.resetSegments([]); + } /** * For server-side synchronizer: add the given list of segments to the cache, with an empty list of keys. The segments that already exist are not modified. @@ -49,16 +51,41 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { abstract getKeysCount(): number /** - * For server-side synchronizer: set the change number of `name` segment. - * For client-side synchronizer: the method is not used. + * For server-side synchronizer: change number of `name` segment. + * For client-side synchronizer: change number of mySegments. */ - setChangeNumber(name: string, changeNumber: number): boolean { return true; } - + abstract setChangeNumber(name?: string, changeNumber?: number): boolean | void abstract getChangeNumber(name: string): number /** * For server-side synchronizer: the method is not used. - * For client-side synchronizer: reset the cache with the given list of segments. + * For client-side synchronizer: it resets the cache with the given list of segments. */ - resetSegments(names: string[], changeNumber?: number): boolean { return true; } + resetSegments(names: string[], changeNumber?: number): boolean { + this.setChangeNumber(undefined, changeNumber); + + names = names.sort(); + const storedSegmentKeys = this.getRegisteredSegments().sort(); + + // Extreme fast => everything is empty + if (!names.length && !storedSegmentKeys.length) return false; + + let index = 0; + + while (index < names.length && index < storedSegmentKeys.length && names[index] === storedSegmentKeys[index]) index++; + + // Quick path => no changes + if (index === names.length && index === storedSegmentKeys.length) return false; + + // Slowest path => add and/or remove segments + for (let removeIndex = index; removeIndex < storedSegmentKeys.length; removeIndex++) { + this.removeFromSegment(storedSegmentKeys[removeIndex]); + } + + for (let addIndex = index; addIndex < names.length; addIndex++) { + this.addToSegment(names[addIndex]); + } + + return true; + } } diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index 461056ff..ef44db40 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -32,7 +32,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { return splits; } - abstract setChangeNumber(changeNumber: number): boolean + abstract setChangeNumber(changeNumber: number): boolean | void abstract getChangeNumber(): number diff --git a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts index 165812e8..4931f6f2 100644 --- a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts @@ -16,18 +16,6 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { // There is not need to flush segments cache like splits cache, since resetSegments receives 
the up-to-date list of active segments } - /** - * Removes list of segments from localStorage - * @NOTE this method is not being used at the moment. - */ - clear() { - this.log.info(LOG_PREFIX + 'Flushing MySegments data from localStorage'); - - // We cannot simply call `localStorage.clear()` since that implies removing user items from the storage - // We could optimize next sentence, since it implies iterating over all localStorage items - this.resetSegments([]); - } - addToSegment(name: string): boolean { const segmentKey = this.keys.buildSegmentNameKey(name); @@ -56,41 +44,22 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { return localStorage.getItem(this.keys.buildSegmentNameKey(name)) === DEFINED; } - /** - * Reset (update) the cached list of segments with the given list, removing and adding segments if necessary. - * - * @param {string[]} names list of segment names - * @returns boolean indicating if the cache was updated (i.e., given list was different from the cached one) - */ - resetSegments(names: string[], changeNumber?: number): boolean { - try { - if (changeNumber) { - localStorage.setItem(this.keys.buildTillKey(), changeNumber + ''); - } else { - localStorage.removeItem(this.keys.buildTillKey()); - } - } catch (e) { - this.log.error(e); - } - - let isDiff = false; - let index; - + getRegisteredSegments(): string[] { // Scan current values from localStorage - const storedSegmentNames = Object.keys(localStorage).reduce((accum, key) => { + return Object.keys(localStorage).reduce((accum, key) => { let segmentName = this.keys.extractSegmentName(key); if (segmentName) { accum.push(segmentName); } else { - // @TODO @BREAKING: This is only to clean up "old" keys. Remove this whole else code block and reuse `getRegisteredSegments` method. + // @TODO @BREAKING: This is only to clean up "old" keys. Remove this whole else code block segmentName = this.keys.extractOldSegmentKey(key); if (segmentName) { // this was an old segment key, let's clean up. const newSegmentKey = this.keys.buildSegmentNameKey(segmentName); try { // If the new format key is not there, create it. - if (!localStorage.getItem(newSegmentKey) && names.indexOf(segmentName) > -1) { + if (!localStorage.getItem(newSegmentKey)) { localStorage.setItem(newSegmentKey, DEFINED); // we are migrating a segment, let's track it. accum.push(segmentName); @@ -104,46 +73,21 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { return accum; }, [] as string[]); - - // Extreme fast => everything is empty - if (names.length === 0 && storedSegmentNames.length === names.length) - return isDiff; - - // Quick path - if (storedSegmentNames.length !== names.length) { - isDiff = true; - - storedSegmentNames.forEach(name => this.removeFromSegment(name)); - names.forEach(name => this.addToSegment(name)); - } else { - // Slowest path => we need to find at least 1 difference because - for (index = 0; index < names.length && storedSegmentNames.indexOf(names[index]) !== -1; index++) { - // TODO: why empty statement? 
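A worked example of the resetSegments diffing that this patch centralizes in AbstractSegmentsCacheSync. Values are illustrative, and the signature is the one in effect at this point of the series: a list of names plus an optional change number.

import { MySegmentsCacheInMemory } from '../inMemory/MySegmentsCacheInMemory';

const cache = new MySegmentsCacheInMemory();
cache.resetSegments(['segment-a', 'segment-b'], 100); // true: both names are added
cache.resetSegments(['segment-a', 'segment-b'], 100); // false: sorted stored and incoming lists already match
cache.resetSegments(['segment-a', 'segment-c'], 101); // true: 'segment-b' is removed, 'segment-c' is added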
- } - - if (index < names.length) { - isDiff = true; - - storedSegmentNames.forEach(name => this.removeFromSegment(name)); - names.forEach(name => this.addToSegment(name)); - } - } - - return isDiff; - } - - getRegisteredSegments(): string[] { - return Object.keys(localStorage).reduce((accum, key) => { - const segmentName = this.keys.extractSegmentName(key); - if (segmentName) accum.push(segmentName); - return accum; - }, []); } getKeysCount() { return 1; } + setChangeNumber(name?: string, changeNumber?: number) { + try { + if (changeNumber) localStorage.setItem(this.keys.buildTillKey(), changeNumber + ''); + else localStorage.removeItem(this.keys.buildTillKey()); + } catch (e) { + this.log.error(e); + } + } + getChangeNumber() { const n = -1; let value: string | number | null = localStorage.getItem(this.keys.buildTillKey()); diff --git a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts index 3b062c66..c754346a 100644 --- a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts @@ -65,4 +65,6 @@ test('SEGMENT CACHE / in LocalStorage migration for mysegments keys', () => { expect(localStorage.getItem(oldKey2)).toBe(null); // Old keys are removed. cache.clear(); + expect(cache.getRegisteredSegments()).toEqual([]); + expect(cache.getChangeNumber()).toBe(-1); }); diff --git a/src/storages/inMemory/MySegmentsCacheInMemory.ts b/src/storages/inMemory/MySegmentsCacheInMemory.ts index f2b8ad93..a373a893 100644 --- a/src/storages/inMemory/MySegmentsCacheInMemory.ts +++ b/src/storages/inMemory/MySegmentsCacheInMemory.ts @@ -9,10 +9,6 @@ export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { private segmentCache: Record = {}; private cn?: number; - clear() { - this.segmentCache = {}; - } - addToSegment(name: string): boolean { this.segmentCache[name] = true; @@ -29,51 +25,10 @@ export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { return this.segmentCache[name] === true; } - /** - * Reset (update) the cached list of segments with the given list, removing and adding segments if necessary. - * @NOTE based on the way we use segments in the browser, this way is the best option - * - * @param {string[]} names list of segment names - * @returns boolean indicating if the cache was updated (i.e., given list was different from the cached one) - */ - resetSegments(names: string[], changeNumber?: number): boolean { - this.cn = changeNumber; - let isDiff = false; - let index; - - const storedSegmentKeys = Object.keys(this.segmentCache); - - // Extreme fast => everything is empty - if (names.length === 0 && storedSegmentKeys.length === names.length) - return isDiff; - - // Quick path - if (storedSegmentKeys.length !== names.length) { - isDiff = true; - this.segmentCache = {}; - names.forEach(s => { - this.addToSegment(s); - }); - } else { - // Slowest path => we need to find at least 1 difference because - for (index = 0; index < names.length && this.isInSegment(names[index]); index++) { - // TODO: why empty statement? 
- } - - if (index < names.length) { - isDiff = true; - - this.segmentCache = {}; - names.forEach(s => { - this.addToSegment(s); - }); - } - } - - return isDiff; + setChangeNumber(name?: string, changeNumber?: number) { + this.cn = changeNumber; } - getChangeNumber() { return this.cn || -1; } diff --git a/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts index 62fb95f7..07c81772 100644 --- a/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts @@ -12,10 +12,13 @@ test('MY SEGMENTS CACHE / in memory', () => { expect(cache.getRegisteredSegments()).toEqual(['mocked-segment', 'mocked-segment-2']); expect(cache.getKeysCount()).toBe(1); - cache.removeFromSegment('mocked-segment'); + expect(cache.resetSegments(['mocked-segment-2'], 150)).toBe(true); expect(cache.isInSegment('mocked-segment')).toBe(false); expect(cache.getRegisteredSegments()).toEqual(['mocked-segment-2']); expect(cache.getKeysCount()).toBe(1); + cache.clear(); + expect(cache.getRegisteredSegments()).toEqual([]); + expect(cache.getChangeNumber()).toBe(-1); }); diff --git a/src/storages/types.ts b/src/storages/types.ts index b0ead7f9..d09a25e2 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -218,7 +218,7 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { removeSplits(names: string[]): boolean[], getSplit(name: string): ISplit | null, getSplits(names: string[]): Record, - setChangeNumber(changeNumber: number): boolean, + setChangeNumber(changeNumber: number): boolean | void, getChangeNumber(): number, getAll(): ISplit[], getSplitNames(): string[], @@ -268,7 +268,7 @@ export interface ISegmentsCacheSync extends ISegmentsCacheBase { registerSegments(names: string[]): boolean getRegisteredSegments(): string[] getKeysCount(): number // only used for telemetry - setChangeNumber(name: string, changeNumber: number): boolean + setChangeNumber(name: string, changeNumber: number): boolean | void getChangeNumber(name: string): number resetSegments(names: string[], changeNumber?: number): boolean // only for Sync Client-Side clear(): void From f49ccac8ce39d73659c33ac85f88efe3635246e1 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 30 Aug 2024 22:23:22 +0100 Subject: [PATCH 057/146] Update resetSegments method to handle instant updates --- src/dtos/types.ts | 21 ++++++------- src/storages/AbstractSegmentsCacheSync.ts | 28 +++++++++++++---- src/storages/dataLoader.ts | 2 +- .../inLocalStorage/MySegmentsCacheInLocal.ts | 2 ++ .../__tests__/MySegmentsCacheInLocal.spec.ts | 6 ++-- .../inMemory/MySegmentsCacheInMemory.ts | 5 ++++ .../__tests__/MySegmentsCacheInMemory.spec.ts | 6 ++-- src/storages/types.ts | 5 ++-- src/sync/polling/types.ts | 16 +++++----- .../polling/updaters/mySegmentsUpdater.ts | 30 ++++++++----------- src/sync/streaming/pushManager.ts | 19 ++++++------ 11 files changed, 79 insertions(+), 61 deletions(-) diff --git a/src/dtos/types.ts b/src/dtos/types.ts index 82a28689..85d5251e 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -224,20 +224,17 @@ export interface ISegmentChangesResponse { till: number } +export interface IMySegmentsResponse { + cn?: number, + k?: { + n: string + }[] +} + /** Interface of the parsed JSON response of `/memberships/{userKey}` */ export interface IMembershipsResponse { - ms?: { - cn?: number, - k?: Array<{ - n: string - }> - }, - ls?: { - cn?: number, - k?: Array<{ - n: string - }> - } + ms?: 
IMySegmentsResponse, + ls?: IMySegmentsResponse } /** Metadata internal type for storages */ diff --git a/src/storages/AbstractSegmentsCacheSync.ts b/src/storages/AbstractSegmentsCacheSync.ts index 4b92c57c..7e398203 100644 --- a/src/storages/AbstractSegmentsCacheSync.ts +++ b/src/storages/AbstractSegmentsCacheSync.ts @@ -1,5 +1,7 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ /* eslint-disable no-unused-vars */ +import { IMySegmentsResponse } from '../dtos/types'; +import { MySegmentsData } from '../sync/polling/types'; import { ISegmentsCacheSync } from './types'; /** @@ -29,7 +31,7 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { * clear the cache. */ clear() { - this.resetSegments([]); + this.resetSegments({}); } /** @@ -59,12 +61,28 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { /** * For server-side synchronizer: the method is not used. - * For client-side synchronizer: it resets the cache with the given list of segments. + * For client-side synchronizer: it resets or updates the cache. */ - resetSegments(names: string[], changeNumber?: number): boolean { - this.setChangeNumber(undefined, changeNumber); + resetSegments(segmentsData: MySegmentsData | IMySegmentsResponse): boolean { + this.setChangeNumber(undefined, segmentsData.cn); - names = names.sort(); + const { added, removed } = segmentsData as MySegmentsData; + + if (added && removed) { + let isDiff = false; + + added.forEach(segment => { + isDiff = this.addToSegment(segment) || isDiff; + }); + + removed.forEach(segment => { + isDiff = this.removeFromSegment(segment) || isDiff; + }); + + return isDiff; + } + + const names = ((segmentsData as IMySegmentsResponse).k || []).map(s => s.n).sort(); const storedSegmentKeys = this.getRegisteredSegments().sort(); // Extreme fast => everything is empty diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 1e351157..24898d68 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -50,6 +50,6 @@ export function dataLoaderFactory(preloadedData: SplitIO.PreloadedData): DataLoa return Array.isArray(userIds) && userIds.indexOf(userId) > -1; }); } - storage.segments.resetSegments(mySegmentsData); + storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); }; } diff --git a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts index 4931f6f2..7e01a906 100644 --- a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts @@ -20,6 +20,7 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { const segmentKey = this.keys.buildSegmentNameKey(name); try { + if (localStorage.getItem(segmentKey) === DEFINED) return false; localStorage.setItem(segmentKey, DEFINED); return true; } catch (e) { @@ -32,6 +33,7 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { const segmentKey = this.keys.buildSegmentNameKey(name); try { + if (localStorage.getItem(segmentKey) !== DEFINED) return false; localStorage.removeItem(segmentKey); return true; } catch (e) { diff --git a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts index c754346a..aac52cac 100644 --- a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts @@ -11,9 +11,9 @@ test('SEGMENT CACHE / 
in LocalStorage', () => { caches.forEach(cache => { cache.clear(); - expect(cache.resetSegments(['mocked-segment', 'mocked-segment-2'], 123)).toBe(true); + expect(cache.resetSegments({ k: [{ n: 'mocked-segment' }, { n: 'mocked-segment-2' }], cn: 123 })).toBe(true); expect(cache.getChangeNumber()).toBe(123); - expect(cache.resetSegments(['mocked-segment', 'mocked-segment-2'])).toBe(false); + expect(cache.resetSegments({ k: [{ n: 'mocked-segment' }, { n: 'mocked-segment-2' }] })).toBe(false); expect(cache.getChangeNumber()).toBe(-1); expect(cache.isInSegment('mocked-segment')).toBe(true); @@ -57,7 +57,7 @@ test('SEGMENT CACHE / in LocalStorage migration for mysegments keys', () => { localStorage.setItem(oldKey2, '1'); expect(localStorage.getItem(newKey1)).toBe(null); // control assertion - cache.resetSegments(['segment1']); + cache.resetSegments({ k: [{ n: 'segment1' }] }); expect(localStorage.getItem(newKey1)).toBe('1'); // The segment key for segment1, as is part of the new list, should be migrated. expect(localStorage.getItem(newKey2)).toBe(null); // The segment key for segment2 should not be migrated. diff --git a/src/storages/inMemory/MySegmentsCacheInMemory.ts b/src/storages/inMemory/MySegmentsCacheInMemory.ts index a373a893..1e10c0a6 100644 --- a/src/storages/inMemory/MySegmentsCacheInMemory.ts +++ b/src/storages/inMemory/MySegmentsCacheInMemory.ts @@ -10,12 +10,16 @@ export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { private cn?: number; addToSegment(name: string): boolean { + if (this.segmentCache[name]) return false; + this.segmentCache[name] = true; return true; } removeFromSegment(name: string): boolean { + if (!this.segmentCache[name]) return false; + delete this.segmentCache[name]; return true; @@ -29,6 +33,7 @@ export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { setChangeNumber(name?: string, changeNumber?: number) { this.cn = changeNumber; } + getChangeNumber() { return this.cn || -1; } diff --git a/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts index 07c81772..b936d17c 100644 --- a/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts @@ -3,16 +3,16 @@ import { MySegmentsCacheInMemory } from '../MySegmentsCacheInMemory'; test('MY SEGMENTS CACHE / in memory', () => { const cache = new MySegmentsCacheInMemory(); - expect(cache.resetSegments(['mocked-segment', 'mocked-segment-2'], 123)).toBe(true); + expect(cache.resetSegments({ k: [{ n: 'mocked-segment' }, { n: 'mocked-segment-2' }], cn: 123 })).toBe(true); expect(cache.getChangeNumber()).toBe(123); - expect(cache.resetSegments(['mocked-segment', 'mocked-segment-2'])).toBe(false); + expect(cache.resetSegments({ k: [{ n: 'mocked-segment' }, { n: 'mocked-segment-2' }] })).toBe(false); expect(cache.getChangeNumber()).toBe(-1); expect(cache.isInSegment('mocked-segment')).toBe(true); expect(cache.getRegisteredSegments()).toEqual(['mocked-segment', 'mocked-segment-2']); expect(cache.getKeysCount()).toBe(1); - expect(cache.resetSegments(['mocked-segment-2'], 150)).toBe(true); + expect(cache.resetSegments({ k: [{ n: 'mocked-segment-2' }], cn: 150})).toBe(true); expect(cache.isInSegment('mocked-segment')).toBe(false); expect(cache.getRegisteredSegments()).toEqual(['mocked-segment-2']); diff --git a/src/storages/types.ts b/src/storages/types.ts index d09a25e2..a416a783 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ 
-1,4 +1,5 @@ -import { MaybeThenable, ISplit } from '../dtos/types'; +import { MaybeThenable, ISplit, IMySegmentsResponse } from '../dtos/types'; +import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { SplitIO, ImpressionDTO, ISettings } from '../types'; import { ISet } from '../utils/lang/sets'; @@ -270,7 +271,7 @@ export interface ISegmentsCacheSync extends ISegmentsCacheBase { getKeysCount(): number // only used for telemetry setChangeNumber(name: string, changeNumber: number): boolean | void getChangeNumber(name: string): number - resetSegments(names: string[], changeNumber?: number): boolean // only for Sync Client-Side + resetSegments(segmentsData: MySegmentsData | IMySegmentsResponse): boolean // only for Sync Client-Side clear(): void } diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 65bcc45b..9b07003c 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -1,4 +1,4 @@ -import { IMembershipsResponse, ISplit } from '../../dtos/types'; +import { ISplit } from '../../dtos/types'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; import { ITask, ISyncTask } from '../types'; @@ -7,14 +7,12 @@ export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: nu export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { } -export type MySegmentsData = IMembershipsResponse | { - /* segment type */ - isLS?: boolean - /* segment name */ - name: string - /* action: `true` for add, and `false` for delete */ - add: boolean -}[] +export type MySegmentsData = { + isLS: boolean + cn?: number + added: string[] + removed: string[] +} export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> { } diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 6c30a18e..a9667d15 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -6,8 +6,11 @@ import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; +import { IMembershipsResponse } from '../../../dtos/types'; -type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => Promise +type MembershipsData = IMembershipsResponse | MySegmentsData; + +type IMySegmentsUpdater = (segmentList?: MembershipsData, noCache?: boolean) => Promise /** * factory of MySegments updater, a task that: @@ -36,23 +39,16 @@ export function mySegmentsUpdaterFactory( } // @TODO if allowing pluggable storages, handle async execution - function updateSegments(segmentsData: MySegmentsData) { + function updateSegments(segmentsData: MembershipsData) { let shouldNotifyUpdate; - if (Array.isArray(segmentsData)) { - // Add/Delete the segment names - segmentsData.forEach(({ isLS, name, add }) => { - const cache = isLS ? 
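To make the reshaped MySegmentsData type above concrete, these are hypothetical payloads that the streaming handlers hand to the updater; the segment names and change numbers mirror the test mocks used elsewhere in this series.

import { MySegmentsData } from './types'; // the type defined above in src/sync/polling/types.ts

// KeyList notification: this client's hashed key appeared in the added list.
const keyListUpdate: MySegmentsData = {
  isLS: false,                 // targets the regular mySegments cache
  cn: 1457552652000,
  added: ['splitters'],
  removed: []
};

// SegmentRemoval notification: every client drops the listed large segments.
const segmentRemovalUpdate: MySegmentsData = {
  isLS: true,                  // targets the largeSegments cache
  cn: 1457552653000,
  added: [],
  removed: ['employees']
};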
largeSegments : segments; - if (cache!.isInSegment(name) !== add) { - shouldNotifyUpdate = true; - if (add) cache!.addToSegment(name); - else cache!.removeFromSegment(name); - } - }); + if ((segmentsData as MySegmentsData).isLS !== undefined) { + shouldNotifyUpdate = (segmentsData as MySegmentsData).isLS ? + largeSegments!.resetSegments(segmentsData as MySegmentsData) : + segments.resetSegments(segmentsData as MySegmentsData); } else { - // Reset the list of segment names - shouldNotifyUpdate = segments.resetSegments((segmentsData.ms?.k || []).map((segment) => segment.n), segmentsData.ms?.cn); - shouldNotifyUpdate = largeSegments!.resetSegments((segmentsData.ls?.k || []).map((segment) => segment.n), segmentsData.ls?.cn) || shouldNotifyUpdate; + shouldNotifyUpdate = segments.resetSegments((segmentsData as IMembershipsResponse).ms || {}); + shouldNotifyUpdate = largeSegments!.resetSegments((segmentsData as IMembershipsResponse).ls || {}) || shouldNotifyUpdate; } // Notify update if required @@ -62,7 +58,7 @@ export function mySegmentsUpdaterFactory( } } - function _mySegmentsUpdater(retry: number, segmentsData?: MySegmentsData, noCache?: boolean): Promise { + function _mySegmentsUpdater(retry: number, segmentsData?: MembershipsData, noCache?: boolean): Promise { const updaterPromise: Promise = segmentsData ? // If segmentsData is provided, there is no need to fetch mySegments new Promise((res) => { updateSegments(segmentsData); res(true); }) : @@ -98,7 +94,7 @@ export function mySegmentsUpdaterFactory( * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage. * @param {boolean | undefined} noCache true to revalidate data to fetch */ - return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean) { + return function mySegmentsUpdater(segmentsData?: MembershipsData, noCache?: boolean) { return _mySegmentsUpdater(0, segmentsData, noCache); }; diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 2643731f..1a51d498 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -277,11 +277,12 @@ export function pushManagerFactory( forOwn(clients, ({ hash64, worker, workerLarge }) => { const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; if (add !== undefined) { - (isLS ? workerLarge : worker).put(parsedData.cn, [{ + (isLS ? workerLarge : worker).put(parsedData.cn, { isLS, - name: parsedData.n![0], - add, - }]); + cn: parsedData.cn, + added: add ? [parsedData.n![0]] : [], + removed: add ? [] : [parsedData.n![0]] + }); } }); return; @@ -293,12 +294,12 @@ export function pushManagerFactory( } forOwn(clients, ({ worker, workerLarge }) => { - (isLS ? workerLarge : worker).put(parsedData.cn, parsedData.n!.map(largeSegment => ({ + (isLS ? workerLarge : worker).put(parsedData.cn, { isLS, - name: largeSegment, - add: false, - cn: parsedData.cn - }))); + cn: parsedData.cn, + added: [], + removed: parsedData.n! 
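The polling path, in contrast, passes the whole /memberships payload. A hedged sketch of the IMembershipsResponse that updateSegments above receives; the field values are illustrative.

import { IMembershipsResponse } from '../../../dtos/types';

// `ms` is applied to the segments cache and `ls` to the largeSegments cache,
// each via resetSegments; SDK_SEGMENTS_ARRIVED is emitted only if either changed.
const membershipsData: IMembershipsResponse = {
  ms: { cn: 1457552650000, k: [{ n: 'splitters' }] },
  ls: { cn: 1457552650000, k: [{ n: 'employees' }] }
};

// Passing it to the updater applies it directly, with no fetch:
// mySegmentsUpdater(membershipsData);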
+ }); }); return; } From fb8bd8f088914c5161b3b472916b344d98dbf470 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Sun, 1 Sep 2024 14:32:15 -0300 Subject: [PATCH 058/146] Notification refactors: t->type, MEMBERSHIP_MS_UPDATE, MEMBERSHIP_LS_UPDATE --- ..._UPDATE.SEGMENT_REMOVAL.1457552653000.json | 4 ++ ...HIP_LS_UPDATE.UNBOUNDED.1457552650000.json | 4 ++ ..._MS_UPDATE.BOUNDED.GZIP.1457552651000.json | 4 ++ ..._MS_UPDATE.KEYLIST.GZIP.1457552652000.json | 4 ++ ..._UPDATE.SEGMENT_REMOVAL.1457552653000.json | 4 ++ ...HIP_MS_UPDATE.UNBOUNDED.1457552650000.json | 4 ++ ..._UPDATE.SEGMENT_REMOVAL.1457552653000.json | 4 -- ...GMENTS_UPDATE.UNBOUNDED.1457552650000.json | 4 -- ..._UPDATE_V3.BOUNDED.GZIP.1457552651000.json | 4 -- ..._UPDATE_V3.KEYLIST.GZIP.1457552652000.json | 4 -- ...DATE_V3.SEGMENT_REMOVAL.1457552653000.json | 4 -- ...NTS_UPDATE_V3.UNBOUNDED.1457552650000.json | 4 -- src/logger/constants.ts | 2 +- src/logger/messages/warn.ts | 2 +- src/sync/streaming/AuthClient/index.ts | 2 +- .../SSEHandler/__tests__/index.spec.ts | 42 +++++++++---------- src/sync/streaming/SSEHandler/index.ts | 20 ++++----- src/sync/streaming/SSEHandler/types.ts | 12 +++--- src/sync/streaming/constants.ts | 4 +- src/sync/streaming/parseUtils.ts | 4 +- src/sync/streaming/pushManager.ts | 26 ++++++------ src/sync/streaming/types.ts | 12 +++--- 22 files changed, 86 insertions(+), 88 deletions(-) create mode 100644 src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json delete mode 100644 src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json delete mode 100644 src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json delete mode 100644 src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json delete mode 100644 src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json delete mode 100644 src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json delete mode 100644 src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json diff --git a/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 00000000..b2ebeb21 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_LS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"employees\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json new file mode 100644 index 00000000..f5e29c7f --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": 
"{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_LS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json new file mode 100644 index 00000000..a2036733 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_MS_UPDATE\\\",\\\"cn\\\":1457552651000,\\\"l\\\":[],\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json new file mode 100644 index 00000000..751d0314 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_MS_UPDATE\\\",\\\"cn\\\":1457552652000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 00000000..fa136a72 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_MS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json new file mode 100644 index 00000000..81d41475 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_MS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json deleted file mode 100644 index 78c8661b..00000000 --- a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"t\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"employees\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json deleted file mode 100644 index 4780aac1..00000000 --- a/src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json +++ 
/dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"t\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json deleted file mode 100644 index f99ade55..00000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"t\\\":\\\"MY_SEGMENTS_UPDATE_V3\\\",\\\"cn\\\":1457552651000,\\\"l\\\":[],\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json deleted file mode 100644 index 3b218920..00000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"t\\\":\\\"MY_SEGMENTS_UPDATE_V3\\\",\\\"cn\\\":1457552652000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json deleted file mode 100644 index 05af28a4..00000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"t\\\":\\\"MY_SEGMENTS_UPDATE_V3\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json deleted file mode 100644 index 2f57352c..00000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"t\\\":\\\"MY_SEGMENTS_UPDATE_V3\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/logger/constants.ts b/src/logger/constants.ts index 9b1582de..3ffa711a 100644 --- a/src/logger/constants.ts +++ b/src/logger/constants.ts @@ -79,7 +79,7 @@ export const WARN_SPLITS_FILTER_IGNORED = 219; export const WARN_SPLITS_FILTER_INVALID = 220; export const WARN_SPLITS_FILTER_EMPTY = 221; export const WARN_SDK_KEY = 222; -export const STREAMING_PARSING_MY_SEGMENTS_UPDATE = 223; +export const STREAMING_PARSING_MEMBERSHIP_UPDATE = 223; export const STREAMING_PARSING_SPLIT_UPDATE = 224; export const WARN_INVALID_FLAGSET = 225; export const WARN_LOWERCASE_FLAGSET = 226; diff --git a/src/logger/messages/warn.ts b/src/logger/messages/warn.ts index 3b74d550..f0940901 100644 --- a/src/logger/messages/warn.ts +++ b/src/logger/messages/warn.ts @@ -32,7 +32,7 @@ export const codesWarn: [number, string][] = 
codesError.concat([ [c.WARN_SPLITS_FILTER_EMPTY, c.LOG_PREFIX_SETTINGS + ': feature flag filter configuration must be a non-empty array of filter objects.'], [c.WARN_SDK_KEY, c.LOG_PREFIX_SETTINGS + ': You already have %s. We recommend keeping only one instance of the factory at all times (Singleton pattern) and reusing it throughout your application'], - [c.STREAMING_PARSING_MY_SEGMENTS_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching Memberships due to an error processing %s notification: %s'], + [c.STREAMING_PARSING_MEMBERSHIP_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching Memberships due to an error processing %s notification: %s'], [c.STREAMING_PARSING_SPLIT_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching SplitChanges due to an error processing SPLIT_UPDATE notification: %s'], [c.WARN_INVALID_FLAGSET, '%s: you passed %s, flag set must adhere to the regular expressions %s. This means a flag set must start with a letter or number, be in lowercase, alphanumeric and have a max length of 50 characters. %s was discarded.'], [c.WARN_LOWERCASE_FLAGSET, '%s: flag set %s should be all lowercase - converting string to lowercase.'], diff --git a/src/sync/streaming/AuthClient/index.ts b/src/sync/streaming/AuthClient/index.ts index b8d81c55..e7654c9d 100644 --- a/src/sync/streaming/AuthClient/index.ts +++ b/src/sync/streaming/AuthClient/index.ts @@ -14,7 +14,7 @@ export function authenticateFactory(fetchAuth: IFetchAuth): IAuthenticate { /** * Run authentication requests to Auth Server, and returns a promise that resolves with the decoded JTW token. - * @param {string[] | undefined} userKeys set of user Keys to track MY_SEGMENTS_CHANGES. It is undefined for server-side API. + * @param {string[] | undefined} userKeys set of user Keys to track membership updates. It is undefined for server-side API. 
*/ return function authenticate(userKeys?: string[]): Promise { return fetchAuth(userKeys) diff --git a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts index 264e8e80..ea3eac8e 100644 --- a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts +++ b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts @@ -1,6 +1,6 @@ // @ts-nocheck import { SSEHandlerFactory } from '..'; -import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_SEGMENTS_UPDATE_V3, MY_LARGE_SEGMENTS_UPDATE, ControlType } from '../../constants'; +import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MEMBERSHIP_MS_UPDATE, MEMBERSHIP_LS_UPDATE, ControlType } from '../../constants'; import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; // update messages @@ -8,16 +8,16 @@ import splitUpdateMessage from '../../../../__tests__/mocks/message.SPLIT_UPDATE import splitKillMessage from '../../../../__tests__/mocks/message.SPLIT_KILL.1457552650000.json'; import segmentUpdateMessage from '../../../../__tests__/mocks/message.SEGMENT_UPDATE.1457552640000.json'; -// update messages MY_SEGMENTS_UPDATE_V3 -import unboundedMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.UNBOUNDED.1457552650000.json'; -import boundedGzipMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.BOUNDED.GZIP.1457552651000.json'; -import keylistGzipMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.KEYLIST.GZIP.1457552652000.json'; -import segmentRemovalMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE_V3.SEGMENT_REMOVAL.1457552653000.json'; +// update messages MEMBERSHIP_MS_UPDATE +import unboundedMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json'; +import boundedGzipMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json'; +import keylistGzipMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json'; +import segmentRemovalMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; import { keylists, bitmaps } from '../../__tests__/dataMocks'; -// update messages MY_LARGE_SEGMENTS_UPDATE -import largeSegmentUnboundedMessage from '../../../../__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json'; -import largeSegmentRemovalMessage from '../../../../__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; +// update messages MEMBERSHIP_LS_UPDATE +import largeSegmentUnboundedMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json'; +import largeSegmentRemovalMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; // occupancy messages import occupancy1ControlPri from '../../../../__tests__/mocks/message.OCCUPANCY.1.control_pri.1586987434450.json'; @@ -152,29 +152,29 @@ test('`handlerMessage` for update notifications (NotificationProcessor) and stre sseHandler.handleMessage(segmentUpdateMessage); expect(pushEmitter.emit).toHaveBeenLastCalledWith(SEGMENT_UPDATE, ...expectedParams); // must emit SEGMENT_UPDATE with the message change number and segment name - expectedParams = [{ t: 'MY_SEGMENTS_UPDATE_V3', cn: 
1457552650000, c: 0, d: '', u: 0, l: [] }]; + expectedParams = [{ type: 'MEMBERSHIP_MS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [] }]; sseHandler.handleMessage(unboundedMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V3, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V3 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_MS_UPDATE with the message parsed data - expectedParams = [{ t: 'MY_SEGMENTS_UPDATE_V3', cn: 1457552651000, c: 1, d: bitmaps[0].bitmapDataCompressed, u: 1, l: [] }]; + expectedParams = [{ type: 'MEMBERSHIP_MS_UPDATE', cn: 1457552651000, c: 1, d: bitmaps[0].bitmapDataCompressed, u: 1, l: [] }]; sseHandler.handleMessage(boundedGzipMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V3, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V3 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_MS_UPDATE with the message parsed data - expectedParams = [{ t: 'MY_SEGMENTS_UPDATE_V3', cn: 1457552652000, c: 1, d: keylists[0].keyListDataCompressed, u: 2, l: ['splitters'] }]; + expectedParams = [{ type: 'MEMBERSHIP_MS_UPDATE', cn: 1457552652000, c: 1, d: keylists[0].keyListDataCompressed, u: 2, l: ['splitters'] }]; sseHandler.handleMessage(keylistGzipMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V3, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V3 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_MS_UPDATE with the message parsed data - expectedParams = [{ t: 'MY_SEGMENTS_UPDATE_V3', cn: 1457552653000, c: 0, d: '', u: 3, l: ['splitters'] }]; + expectedParams = [{ type: 'MEMBERSHIP_MS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 3, l: ['splitters'] }]; sseHandler.handleMessage(segmentRemovalMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V3, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V3 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_MS_UPDATE with the message parsed data - expectedParams = [{ t: 'MY_LARGE_SEGMENTS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [], i: 300, h: 1, s: 0 }]; + expectedParams = [{ type: 'MEMBERSHIP_LS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [], i: 300, h: 1, s: 0 }]; sseHandler.handleMessage(largeSegmentUnboundedMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_LARGE_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_LARGE_SEGMENTS_UPDATE with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_LS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_LS_UPDATE with the message parsed data - expectedParams = [{ t: 'MY_LARGE_SEGMENTS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 3, l: ['employees'] }]; + expectedParams = [{ type: 'MEMBERSHIP_LS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 3, l: ['employees'] }]; sseHandler.handleMessage(largeSegmentRemovalMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_LARGE_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_LARGE_SEGMENTS_UPDATE with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_LS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_LS_UPDATE with the 
message parsed data sseHandler.handleMessage(streamingReset); expect(pushEmitter.emit).toHaveBeenLastCalledWith(ControlType.STREAMING_RESET); // must emit STREAMING_RESET diff --git a/src/sync/streaming/SSEHandler/index.ts b/src/sync/streaming/SSEHandler/index.ts index fdc4d321..ea2d63b8 100644 --- a/src/sync/streaming/SSEHandler/index.ts +++ b/src/sync/streaming/SSEHandler/index.ts @@ -1,9 +1,9 @@ import { errorParser, messageParser } from './NotificationParser'; import { notificationKeeperFactory } from './NotificationKeeper'; -import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, MY_SEGMENTS_UPDATE_V3, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_LARGE_SEGMENTS_UPDATE } from '../constants'; +import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MEMBERSHIP_MS_UPDATE, MEMBERSHIP_LS_UPDATE } from '../constants'; import { IPushEventEmitter } from '../types'; import { ISseEventHandler } from '../SSEClient/types'; -import { IControlData, INotificationError, INotificationMessage, IOccupancyData } from './types'; +import { INotificationError, INotificationMessage } from './types'; import { ILogger } from '../../../logger/types'; import { STREAMING_PARSING_ERROR_FAILS, ERROR_STREAMING_SSE, STREAMING_PARSING_MESSAGE_FAILS, STREAMING_NEW_MESSAGE } from '../../../logger/constants'; import { ABLY_ERROR, NON_REQUESTED, SSE_CONNECTION_ERROR } from '../../../utils/constants'; @@ -75,26 +75,24 @@ export function SSEHandlerFactory(log: ILogger, pushEmitter: IPushEventEmitter, log.debug(STREAMING_NEW_MESSAGE, [data]); // we only handle update events if streaming is up - // @ts-expect-error - const type = parsedData.type || parsedData.t; - if (!notificationKeeper.isStreamingUp() && [OCCUPANCY, CONTROL].indexOf(type) === -1) return; + if (!notificationKeeper.isStreamingUp() && [OCCUPANCY, CONTROL].indexOf(parsedData.type) === -1) return; - switch (type) { + switch (parsedData.type) { /* update events */ case SPLIT_UPDATE: case SEGMENT_UPDATE: - case MY_SEGMENTS_UPDATE_V3: - case MY_LARGE_SEGMENTS_UPDATE: + case MEMBERSHIP_MS_UPDATE: + case MEMBERSHIP_LS_UPDATE: case SPLIT_KILL: - pushEmitter.emit(type, parsedData); + pushEmitter.emit(parsedData.type, parsedData); break; /* occupancy & control events, handled by NotificationManagerKeeper */ case OCCUPANCY: - notificationKeeper.handleOccupancyEvent((parsedData as IOccupancyData).metrics.publishers, channel, timestamp); + notificationKeeper.handleOccupancyEvent(parsedData.metrics.publishers, channel, timestamp); break; case CONTROL: - notificationKeeper.handleControlEvent((parsedData as IControlData).controlType, channel, timestamp); + notificationKeeper.handleControlEvent(parsedData.controlType, channel, timestamp); break; default: diff --git a/src/sync/streaming/SSEHandler/types.ts b/src/sync/streaming/SSEHandler/types.ts index 0f53f8d0..240dd393 100644 --- a/src/sync/streaming/SSEHandler/types.ts +++ b/src/sync/streaming/SSEHandler/types.ts @@ -1,5 +1,5 @@ import { ControlType } from '../constants'; -import { SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MY_LARGE_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V3 } from '../types'; +import { SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MEMBERSHIP_LS_UPDATE, MEMBERSHIP_MS_UPDATE } from '../types'; export enum Compression { None = 0, @@ -19,8 +19,8 @@ export interface KeyList { r?: string[], // decimal hash64 of user keys } -interface IMySegmentsUpdateData { - t: T, +interface IMembershipUpdateData { + 
type: T, cn: number, n?: string[], c?: Compression, @@ -31,9 +31,9 @@ interface IMySegmentsUpdateData { s?: number, // seed for hash function } -export interface IMySegmentsUpdateV3Data extends IMySegmentsUpdateData { } +export interface IMembershipMSUpdateData extends IMembershipUpdateData { } -export interface IMyLargeSegmentsUpdateData extends IMySegmentsUpdateData { } +export interface IMembershipLSUpdateData extends IMembershipUpdateData { } export interface ISegmentUpdateData { type: SEGMENT_UPDATE, @@ -68,6 +68,6 @@ export interface IOccupancyData { } } -export type INotificationData = IMySegmentsUpdateV3Data | IMyLargeSegmentsUpdateData | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData +export type INotificationData = IMembershipMSUpdateData | IMembershipLSUpdateData | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData export type INotificationMessage = { parsedData: INotificationData, channel: string, timestamp: number, data: string } export type INotificationError = Event & { parsedData?: any, message?: string } diff --git a/src/sync/streaming/constants.ts b/src/sync/streaming/constants.ts index bbd72706..27eb4b02 100644 --- a/src/sync/streaming/constants.ts +++ b/src/sync/streaming/constants.ts @@ -25,11 +25,11 @@ export const PUSH_SUBSYSTEM_UP = 'PUSH_SUBSYSTEM_UP'; export const PUSH_SUBSYSTEM_DOWN = 'PUSH_SUBSYSTEM_DOWN'; // Update-type push notifications, handled by NotificationProcessor -export const MY_SEGMENTS_UPDATE_V3 = 'MY_SEGMENTS_UPDATE_V3'; +export const MEMBERSHIP_MS_UPDATE = 'MEMBERSHIP_MS_UPDATE'; +export const MEMBERSHIP_LS_UPDATE = 'MEMBERSHIP_LS_UPDATE'; export const SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export const SPLIT_KILL = 'SPLIT_KILL'; export const SPLIT_UPDATE = 'SPLIT_UPDATE'; -export const MY_LARGE_SEGMENTS_UPDATE = 'MY_LARGE_SEGMENTS_UPDATE'; // Control-type push notifications, handled by NotificationKeeper export const CONTROL = 'CONTROL'; diff --git a/src/sync/streaming/parseUtils.ts b/src/sync/streaming/parseUtils.ts index d7342baa..925b0524 100644 --- a/src/sync/streaming/parseUtils.ts +++ b/src/sync/streaming/parseUtils.ts @@ -1,7 +1,7 @@ import { algorithms } from '../../utils/decompress'; import { decodeFromBase64 } from '../../utils/base64'; import { hash } from '../../utils/murmur3/murmur3'; -import { Compression, IMyLargeSegmentsUpdateData, KeyList } from './SSEHandler/types'; +import { Compression, IMembershipMSUpdateData, KeyList } from './SSEHandler/types'; import { ISplit } from '../../dtos/types'; const GZIP = 1; @@ -91,7 +91,7 @@ export function parseFFUpdatePayload(compression: Compression, data: string): IS const DEFAULT_MAX_INTERVAL = 60000; -export function getDelay(parsedData: Pick, matchingKey: string) { +export function getDelay(parsedData: Pick, matchingKey: string) { if (parsedData.h === 0) return 0; const interval = parsedData.i || DEFAULT_MAX_INTERVAL; diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 2643731f..023e9271 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -11,9 +11,9 @@ import { authenticateFactory, hashUserKey } from './AuthClient'; import { forOwn } from '../../utils/lang'; import { SSEClient } from './SSEClient'; import { getMatching } from '../../utils/key'; -import { MY_SEGMENTS_UPDATE_V3, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType, 
MY_LARGE_SEGMENTS_UPDATE } from './constants'; -import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MY_SEGMENTS_UPDATE, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; -import { IMyLargeSegmentsUpdateData, IMySegmentsUpdateV3Data, KeyList, UpdateStrategy } from './SSEHandler/types'; +import { MEMBERSHIP_MS_UPDATE, MEMBERSHIP_LS_UPDATE, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType } from './constants'; +import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MEMBERSHIP_UPDATE, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; +import { IMembershipMSUpdateData, IMembershipLSUpdateData, KeyList, UpdateStrategy } from './SSEHandler/types'; import { getDelay, isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; import { ISet, _Set } from '../../utils/lang/sets'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; @@ -59,10 +59,10 @@ export function pushManagerFactory( // For server-side we pass the segmentsSyncTask, used by SplitsUpdateWorker to fetch new segments const splitsUpdateWorker = SplitsUpdateWorker(log, storage.splits, pollingManager.splitsSyncTask, readiness.splits, telemetryTracker, userKey ? undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask); - // [Only for client-side] map of hashes to user keys, to dispatch MY_SEGMENTS_UPDATE events to the corresponding MySegmentsUpdateWorker + // [Only for client-side] map of hashes to user keys, to dispatch update events to the corresponding MySegmentsUpdateWorker const userKeyHashes: Record = {}; // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers. - // Hash64 is used to process MY_SEGMENTS_UPDATE events and dispatch actions to the corresponding MySegmentsUpdateWorker. + // Hash64 is used to process membership update events and dispatch actions to the corresponding MySegmentsUpdateWorker. const clients: Record, workerLarge: ReturnType }> = {}; // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming. 
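A minimal sketch of the client-side dispatch these maps support may help when reading the hunks that follow: each client key is hashed once when it is registered, and membership notifications are later routed to that client's worker by comparing the stored hash against the notification payload. The names below (`hashKey`, `ClientEntry`, `dispatchKeyList`) are illustrative stand-ins, not the SDK's actual helpers, which use a murmur3 64-bit hash with hex and decimal forms.

    // Sketch only: route a keylist-style membership update to the clients whose
    // hashed key appears in the notification's added/removed lists.
    type Worker = { put(event: { type: string; cn: number }): void };

    interface ClientEntry {
      hashHex: string;      // compared against bitmaps (BoundedFetchRequest updates)
      hashDec: string;      // compared against keylists (KeyList updates)
      worker: Worker;       // regular ("MS") segments worker
      workerLarge: Worker;  // large ("LS") segments worker
    }

    const clients: Record<string, ClientEntry> = {};

    // Hypothetical hash helper standing in for the SDK's murmur3 hash64.
    function hashKey(userKey: string): { hex: string; dec: string } {
      let h = 0;
      for (const ch of userKey) h = (h * 31 + ch.charCodeAt(0)) >>> 0;
      return { hex: h.toString(16), dec: h.toString(10) };
    }

    function addClient(userKey: string, worker: Worker, workerLarge: Worker) {
      const { hex, dec } = hashKey(userKey);
      clients[userKey] = { hashHex: hex, hashDec: dec, worker, workerLarge };
    }

    function dispatchKeyList(
      event: { type: string; cn: number },
      added: Set<string>,
      removed: Set<string>,
      isLargeSegment: boolean
    ) {
      for (const userKey of Object.keys(clients)) {
        const entry = clients[userKey];
        if (added.has(entry.hashDec) || removed.has(entry.hashDec)) {
          (isLargeSegment ? entry.workerLarge : entry.worker).put(event);
        }
      }
    }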
@@ -238,8 +238,8 @@ export function pushManagerFactory( splitsUpdateWorker.put(parsedData); }); - function handleMySegmentsUpdate(parsedData: IMySegmentsUpdateV3Data | IMyLargeSegmentsUpdateData) { - const isLS = parsedData.t === MY_LARGE_SEGMENTS_UPDATE; + function handleMySegmentsUpdate(parsedData: IMembershipMSUpdateData | IMembershipLSUpdateData) { + const isLS = parsedData.type === MEMBERSHIP_LS_UPDATE; switch (parsedData.u) { case UpdateStrategy.BoundedFetchRequest: { @@ -247,7 +247,7 @@ export function pushManagerFactory( try { bitmap = parseBitmap(parsedData.d!, parsedData.c!); } catch (e) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['BoundedFetchRequest', e]); + log.warn(STREAMING_PARSING_MEMBERSHIP_UPDATE, ['BoundedFetchRequest', e]); break; } @@ -265,12 +265,12 @@ export function pushManagerFactory( added = new _Set(keyList.a); removed = new _Set(keyList.r); } catch (e) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['KeyList', e]); + log.warn(STREAMING_PARSING_MEMBERSHIP_UPDATE, ['KeyList', e]); break; } if (!parsedData.n || !parsedData.n.length) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['KeyList', 'No segment name was provided']); + log.warn(STREAMING_PARSING_MEMBERSHIP_UPDATE, ['KeyList', 'No segment name was provided']); break; } @@ -288,7 +288,7 @@ export function pushManagerFactory( } case UpdateStrategy.SegmentRemoval: if (!parsedData.n || !parsedData.n.length) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['SegmentRemoval', 'No segment name was provided']); + log.warn(STREAMING_PARSING_MEMBERSHIP_UPDATE, ['SegmentRemoval', 'No segment name was provided']); break; } @@ -310,8 +310,8 @@ export function pushManagerFactory( } if (userKey) { - pushEmitter.on(MY_SEGMENTS_UPDATE_V3, handleMySegmentsUpdate); - pushEmitter.on(MY_LARGE_SEGMENTS_UPDATE, handleMySegmentsUpdate); + pushEmitter.on(MEMBERSHIP_MS_UPDATE, handleMySegmentsUpdate); + pushEmitter.on(MEMBERSHIP_LS_UPDATE, handleMySegmentsUpdate); } else { pushEmitter.on(SEGMENT_UPDATE, segmentsUpdateWorker!.put); } diff --git a/src/sync/streaming/types.ts b/src/sync/streaming/types.ts index a719ecbb..be76c578 100644 --- a/src/sync/streaming/types.ts +++ b/src/sync/streaming/types.ts @@ -1,4 +1,4 @@ -import { IMySegmentsUpdateV3Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData, IMyLargeSegmentsUpdateData, INotificationData } from './SSEHandler/types'; +import { IMembershipMSUpdateData, IMembershipLSUpdateData, ISegmentUpdateData, ISplitUpdateData, ISplitKillData, INotificationData } from './SSEHandler/types'; import { ITask } from '../types'; import { IMySegmentsSyncTask } from '../polling/types'; import { IEventEmitter } from '../../types'; @@ -11,21 +11,21 @@ export type PUSH_NONRETRYABLE_ERROR = 'PUSH_NONRETRYABLE_ERROR' export type PUSH_RETRYABLE_ERROR = 'PUSH_RETRYABLE_ERROR' // Update-type push notifications, handled by NotificationProcessor -export type MY_SEGMENTS_UPDATE_V3 = 'MY_SEGMENTS_UPDATE_V3'; +export type MEMBERSHIP_MS_UPDATE = 'MEMBERSHIP_MS_UPDATE'; +export type MEMBERSHIP_LS_UPDATE = 'MEMBERSHIP_LS_UPDATE'; export type SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export type SPLIT_KILL = 'SPLIT_KILL'; export type SPLIT_UPDATE = 'SPLIT_UPDATE'; -export type MY_LARGE_SEGMENTS_UPDATE = 'MY_LARGE_SEGMENTS_UPDATE'; // Control-type push notifications, handled by NotificationKeeper export type CONTROL = 'CONTROL'; export type OCCUPANCY = 'OCCUPANCY'; -export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MY_SEGMENTS_UPDATE_V3 | 
SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | MY_LARGE_SEGMENTS_UPDATE | ControlType.STREAMING_RESET +export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MEMBERSHIP_MS_UPDATE | MEMBERSHIP_LS_UPDATE | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | ControlType.STREAMING_RESET type IParsedData = - T extends MY_SEGMENTS_UPDATE_V3 ? IMySegmentsUpdateV3Data : - T extends MY_LARGE_SEGMENTS_UPDATE ? IMyLargeSegmentsUpdateData : + T extends MEMBERSHIP_MS_UPDATE ? IMembershipMSUpdateData : + T extends MEMBERSHIP_LS_UPDATE ? IMembershipLSUpdateData : T extends SEGMENT_UPDATE ? ISegmentUpdateData : T extends SPLIT_UPDATE ? ISplitUpdateData : T extends SPLIT_KILL ? ISplitKillData : INotificationData; From 8917d5b24fca6dc210673e208ec3ed2960b882de Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Sun, 1 Sep 2024 16:54:44 -0300 Subject: [PATCH 059/146] Refactor: MEMBERSHIPS constant for telemetry --- src/services/splitApi.ts | 4 ++-- .../__tests__/TelemetryCacheInMemory.spec.ts | 16 ++++++++-------- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 6 +++--- src/sync/streaming/pushManager.ts | 6 +++--- src/sync/submitters/types.ts | 7 +++---- src/utils/constants/index.ts | 3 +-- 6 files changed, 20 insertions(+), 22 deletions(-) diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index 5fd97e25..e03c83bd 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -4,7 +4,7 @@ import { splitHttpClientFactory } from './splitHttpClient'; import { ISplitApi } from './types'; import { objectAssign } from '../utils/lang/objectAssign'; import { ITelemetryTracker } from '../trackers/types'; -import { SPLITS, IMPRESSIONS, IMPRESSIONS_COUNT, EVENTS, TELEMETRY, TOKEN, SEGMENT, MY_SEGMENT } from '../utils/constants'; +import { SPLITS, IMPRESSIONS, IMPRESSIONS_COUNT, EVENTS, TELEMETRY, TOKEN, SEGMENT, MEMBERSHIPS } from '../utils/constants'; import { ERROR_TOO_MANY_SETS } from '../logger/constants'; const noCacheHeaderOptions = { headers: { 'Cache-Control': 'no-cache' } }; @@ -75,7 +75,7 @@ export function splitApiFactory( * - match user keys with special characters. E.g.: 'foo%bar', 'foo/bar' */ const url = `${urls.sdk}/memberships/${encodeURIComponent(userMatchingKey)}`; - return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MY_SEGMENT)); + return splitHttpClient(url, noCache ? 
noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MEMBERSHIPS)); }, /** diff --git a/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts index 4ed3eb9c..c6d4340b 100644 --- a/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts @@ -1,4 +1,4 @@ -import { QUEUED, DROPPED, DEDUPED, EVENTS, IMPRESSIONS, IMPRESSIONS_COUNT, MY_SEGMENT, SEGMENT, SPLITS, TELEMETRY, TOKEN, TRACK, TREATMENT, TREATMENTS, TREATMENTS_WITH_CONFIG, TREATMENT_WITH_CONFIG } from '../../../utils/constants'; +import { QUEUED, DROPPED, DEDUPED, EVENTS, IMPRESSIONS, IMPRESSIONS_COUNT, MEMBERSHIPS, SEGMENT, SPLITS, TELEMETRY, TOKEN, TRACK, TREATMENT, TREATMENTS, TREATMENTS_WITH_CONFIG, TREATMENT_WITH_CONFIG } from '../../../utils/constants'; import { EventDataType, ImpressionDataType, Method, OperationType, StreamingEvent } from '../../../sync/submitters/types'; import { TelemetryCacheInMemory } from '../TelemetryCacheInMemory'; @@ -14,7 +14,7 @@ const operationTypes: OperationType[] = [ TELEMETRY, TOKEN, SEGMENT, - MY_SEGMENT + MEMBERSHIPS ]; const methods: Method[] = [ @@ -88,7 +88,7 @@ describe('TELEMETRY CACHE', () => { expect(cache.getLastSynchronization()).toEqual(expectedLastSync); // Overwrite a single operation - cache.recordSuccessfulSync(MY_SEGMENT, 100); + cache.recordSuccessfulSync(MEMBERSHIPS, 100); expect(cache.getLastSynchronization()).toEqual({ ...expectedLastSync, 'ms': 100 }); }); @@ -106,7 +106,7 @@ describe('TELEMETRY CACHE', () => { expect(cache.popHttpErrors()).toEqual({}); // Set a single http error - cache.recordHttpError(MY_SEGMENT, 400); + cache.recordHttpError(MEMBERSHIPS, 400); expect(cache.popHttpErrors()).toEqual({ 'ms': { 400: 1 } }); }); @@ -232,14 +232,14 @@ describe('TELEMETRY CACHE', () => { cache.recordUpdatesFromSSE(SPLITS); cache.recordUpdatesFromSSE(SPLITS); cache.recordUpdatesFromSSE(SPLITS); - cache.recordUpdatesFromSSE(MY_SEGMENT); - cache.recordUpdatesFromSSE(MY_SEGMENT); + cache.recordUpdatesFromSSE(MEMBERSHIPS); + cache.recordUpdatesFromSSE(MEMBERSHIPS); expect(cache.popUpdatesFromSSE()).toEqual({ sp: 3, ms: 2 }); expect(cache.popUpdatesFromSSE()).toEqual({}); cache.recordUpdatesFromSSE(SPLITS); - cache.recordUpdatesFromSSE(MY_SEGMENT); + cache.recordUpdatesFromSSE(MEMBERSHIPS); cache.recordUpdatesFromSSE(SPLITS); - cache.recordUpdatesFromSSE(MY_SEGMENT); + cache.recordUpdatesFromSSE(MEMBERSHIPS); expect(cache.popUpdatesFromSSE()).toEqual({ sp: 2, ms: 2 }); expect(cache.popUpdatesFromSSE()).toEqual({}); }); diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index aa8d6482..3b121cd6 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -2,12 +2,12 @@ import { IMySegmentsSyncTask, MySegmentsData } from '../../polling/types'; import { Backoff } from '../../../utils/Backoff'; import { IUpdateWorker } from './types'; import { ITelemetryTracker } from '../../../trackers/types'; -import { UpdatesFromSSEEnum } from '../../submitters/types'; +import { MEMBERSHIPS } from '../../../utils/constants'; /** * MySegmentsUpdateWorker factory */ -export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: 
number]> { +export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]> { let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events let currentChangeNumber = -1; @@ -37,7 +37,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, syncTask.then((result) => { if (!isHandlingEvent) return; // halt if `stop` has been called if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. - if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType); + if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MEMBERSHIPS); currentChangeNumber = Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. } if (handleNewEvent) { diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 707000bc..5978d7a2 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -18,7 +18,7 @@ import { getDelay, isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } import { ISet, _Set } from '../../utils/lang/sets'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; -import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants'; +import { TOKEN_REFRESH, AUTH_REJECTION } from '../../utils/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; /** @@ -352,8 +352,8 @@ export function pushManagerFactory( userKeyHashes[hash] = userKey; clients[userKey] = { hash64: hash64(userKey), - worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, MY_SEGMENT), - workerLarge: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, MY_LARGE_SEGMENT) + worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker), + workerLarge: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker) }; connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index 7dfa0985..bd757b85 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -103,7 +103,7 @@ export type DROPPED = 1; export type DEDUPED = 2; export type ImpressionDataType = QUEUED | DROPPED | DEDUPED export type EventDataType = QUEUED | DROPPED; -export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT | MY_LARGE_SEGMENT; +export type UpdatesFromSSEEnum = SPLITS | MEMBERSHIPS; export type SPLITS = 'sp'; export type IMPRESSIONS = 'im'; @@ -112,9 +112,8 @@ export type EVENTS = 'ev'; export type TELEMETRY = 'te'; export type TOKEN = 'to'; export type SEGMENT = 'se'; -export type MY_SEGMENT = 'ms'; -export type MY_LARGE_SEGMENT = 'mls'; -export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MY_SEGMENT; +export type MEMBERSHIPS = 'ms'; +export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MEMBERSHIPS; export type LastSync = Partial> export type HttpErrors = Partial> diff --git a/src/utils/constants/index.ts b/src/utils/constants/index.ts index e11a2d77..543aa90c 100644 --- a/src/utils/constants/index.ts +++ 
b/src/utils/constants/index.ts @@ -75,8 +75,7 @@ export const EVENTS = 'ev'; export const TELEMETRY = 'te'; export const TOKEN = 'to'; export const SEGMENT = 'se'; -export const MY_SEGMENT = 'ms'; -export const MY_LARGE_SEGMENT = 'mls'; +export const MEMBERSHIPS = 'ms'; export const TREATMENT = 't'; export const TREATMENTS = 'ts'; From 680d889d2048012c02172afc50acf76c67aa7d9a Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Sun, 1 Sep 2024 17:24:35 -0300 Subject: [PATCH 060/146] Refactor to reduce code --- src/sync/polling/types.ts | 5 ++- .../polling/updaters/mySegmentsUpdater.ts | 15 ++++--- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 11 ++--- .../__tests__/MySegmentsUpdateWorker.spec.ts | 40 +++++++++---------- src/sync/streaming/pushManager.ts | 14 +++---- 5 files changed, 41 insertions(+), 44 deletions(-) diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 9b07003c..6611a5a8 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -1,6 +1,7 @@ import { ISplit } from '../../dtos/types'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; +import { MEMBERSHIP_LS_UPDATE, MEMBERSHIP_MS_UPDATE } from '../streaming/types'; import { ITask, ISyncTask } from '../types'; export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit, changeNumber: number }], boolean> { } @@ -8,8 +9,8 @@ export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: nu export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { } export type MySegmentsData = { - isLS: boolean - cn?: number + type: MEMBERSHIP_MS_UPDATE | MEMBERSHIP_LS_UPDATE + cn: number added: string[] removed: string[] } diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index a9667d15..f315e5f7 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -7,10 +7,9 @@ import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; import { IMembershipsResponse } from '../../../dtos/types'; +import { MEMBERSHIP_LS_UPDATE } from '../../streaming/constants'; -type MembershipsData = IMembershipsResponse | MySegmentsData; - -type IMySegmentsUpdater = (segmentList?: MembershipsData, noCache?: boolean) => Promise +type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => Promise /** * factory of MySegments updater, a task that: @@ -39,11 +38,11 @@ export function mySegmentsUpdaterFactory( } // @TODO if allowing pluggable storages, handle async execution - function updateSegments(segmentsData: MembershipsData) { + function updateSegments(segmentsData: IMembershipsResponse | MySegmentsData) { let shouldNotifyUpdate; - if ((segmentsData as MySegmentsData).isLS !== undefined) { - shouldNotifyUpdate = (segmentsData as MySegmentsData).isLS ? + if ((segmentsData as MySegmentsData).type !== undefined) { + shouldNotifyUpdate = (segmentsData as MySegmentsData).type === MEMBERSHIP_LS_UPDATE ? 
largeSegments!.resetSegments(segmentsData as MySegmentsData) : segments.resetSegments(segmentsData as MySegmentsData); } else { @@ -58,7 +57,7 @@ export function mySegmentsUpdaterFactory( } } - function _mySegmentsUpdater(retry: number, segmentsData?: MembershipsData, noCache?: boolean): Promise { + function _mySegmentsUpdater(retry: number, segmentsData?: MySegmentsData, noCache?: boolean): Promise { const updaterPromise: Promise = segmentsData ? // If segmentsData is provided, there is no need to fetch mySegments new Promise((res) => { updateSegments(segmentsData); res(true); }) : @@ -94,7 +93,7 @@ export function mySegmentsUpdaterFactory( * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage. * @param {boolean | undefined} noCache true to revalidate data to fetch */ - return function mySegmentsUpdater(segmentsData?: MembershipsData, noCache?: boolean) { + return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean) { return _mySegmentsUpdater(0, segmentsData, noCache); }; diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index 3b121cd6..c94755fa 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -7,7 +7,7 @@ import { MEMBERSHIPS } from '../../../utils/constants'; /** * MySegmentsUpdateWorker factory */ -export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]> { +export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[mySegmentsData?: Pick, payload?: Pick, delay?: number]> { let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events let currentChangeNumber = -1; @@ -59,13 +59,14 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, * @param segmentsData data for KeyList or SegmentRemoval instant updates * @param delay optional time to wait for BoundedFetchRequest or BoundedFetchRequest updates */ - put(changeNumber: number, segmentsData?: MySegmentsData, delay?: number) { + put(mySegmentsData: Pick, payload?: Pick, delay?: number) { + const { type, cn } = mySegmentsData; // Ignore event if it is outdated or if there is a pending fetch request (_delay is set) - if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber || _delay) return; + if (cn <= currentChangeNumber || cn <= maxChangeNumber || _delay) return; - maxChangeNumber = changeNumber; + maxChangeNumber = cn; handleNewEvent = true; - _segmentsData = segmentsData; + _segmentsData = payload && { type, cn, added: payload.added, removed: payload.removed }; _delay = delay; if (backoff.timeoutID || !isHandlingEvent) __handleMySegmentsUpdateCall(); diff --git a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts index a374edec..26b7c55b 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts @@ -49,14 +49,14 @@ describe('MySegmentsUpdateWorker', () => { // assert calling `mySegmentsSyncTask.execute` if `isExecuting` is false expect(mySegmentsSyncTask.isExecuting()).toBe(false); - mySegmentUpdateWorker.put(100); + 
mySegmentUpdateWorker.put({ cn: 100 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments if `isExecuting` is false // assert queueing changeNumber if `isExecuting` is true expect(mySegmentsSyncTask.isExecuting()).toBe(true); - mySegmentUpdateWorker.put(105); - mySegmentUpdateWorker.put(104); - mySegmentUpdateWorker.put(106); + mySegmentUpdateWorker.put({ cn: 105 }); + mySegmentUpdateWorker.put({ cn: 104 }); + mySegmentUpdateWorker.put({ cn: 106 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true // assert calling `mySegmentsSyncTask.execute` if previous call is resolved and a new changeNumber in queue @@ -79,25 +79,25 @@ describe('MySegmentsUpdateWorker', () => { // to validate the special case than the fetch associated to the first event is resolved after a second event with payload arrives mySegmentsSyncTask.execute.mockClear(); expect(mySegmentsSyncTask.isExecuting()).toBe(false); - mySegmentUpdateWorker.put(110); + mySegmentUpdateWorker.put({ cn: 110 }); expect(mySegmentsSyncTask.isExecuting()).toBe(true); - mySegmentUpdateWorker.put(120, ['some_segment']); + mySegmentUpdateWorker.put({ cn: 120 }, { removed: ['some_segment'] }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true, even if payload (segmentList) is included expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['some_segment'], true); // synchronizes MySegments with given segmentList + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 120, removed: ['some_segment'] }, true); // synchronizes MySegments with given segmentList mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); // assert handling an event without segmentList after one with segmentList mySegmentsSyncTask.execute.mockClear(); - mySegmentUpdateWorker.put(130, ['other_segment']); - mySegmentUpdateWorker.put(140); + mySegmentUpdateWorker.put({ cn: 130 }, { removed: ['other_segment'] }); + mySegmentUpdateWorker.put({ cn: 140 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments once, until event is handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['other_segment'], true); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 130, removed: ['other_segment'] }, true); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); @@ -116,12 +116,12 @@ describe('MySegmentsUpdateWorker', () => { const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); // while fetch fails, should retry with backoff - mySegmentUpdateWorker.put(100); + mySegmentUpdateWorker.put({ cn: 100 }); await new Promise(res => setTimeout(res, Backoff.__TEST__BASE_MILLIS * 3 + 100 /* some delay */)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); // if backoff is scheduled and a new event is queued, it must be handled immediately - mySegmentUpdateWorker.put(105); + mySegmentUpdateWorker.put({ cn: 105 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(4); }); @@ -130,13 +130,13 @@ 
describe('MySegmentsUpdateWorker', () => { const mySegmentsSyncTask = mySegmentsSyncTaskMock([false]); const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); - mySegmentUpdateWorker.put(100); + mySegmentUpdateWorker.put({ cn: 100 }); mySegmentUpdateWorker.stop(); await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after stopping expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); - mySegmentUpdateWorker.put(150, undefined, 10); + mySegmentUpdateWorker.put({ cn: 150 }, undefined, 10); mySegmentUpdateWorker.stop(); await new Promise(res => setTimeout(res, 20)); // Wait to assert no calls to `execute` after stopping (fetch request with delay is cleared) @@ -149,8 +149,8 @@ describe('MySegmentsUpdateWorker', () => { const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); // If a delayed fetch request is queued while another fetch request is waiting, it is discarded - mySegmentUpdateWorker.put(100, undefined, 50); - mySegmentUpdateWorker.put(150, undefined, 100); + mySegmentUpdateWorker.put({ cn: 100 }, undefined, 50); + mySegmentUpdateWorker.put({ cn: 150 }, undefined, 100); await new Promise(res => setTimeout(res, 60)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); @@ -161,9 +161,9 @@ describe('MySegmentsUpdateWorker', () => { expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // If an event with segmentData (i.e., an instant update) is queued while a delayed fetch request is waiting, the instant update is discarded - mySegmentUpdateWorker.put(200, undefined, 50); + mySegmentUpdateWorker.put({ cn: 200 }, undefined, 50); await new Promise(res => setTimeout(res, 10)); - mySegmentUpdateWorker.put(230, ['some_segment']); + mySegmentUpdateWorker.put({ cn: 230 }, { added: ['some_segment'] }); await new Promise(res => setTimeout(res, 60)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); @@ -171,8 +171,8 @@ describe('MySegmentsUpdateWorker', () => { mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); - mySegmentUpdateWorker.put(250, ['some_segment']); + mySegmentUpdateWorker.put({ cn: 250 }, { added: ['some_segment'] }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['some_segment'], true); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 250, added: ['some_segment'] }, true); }); }); diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 5978d7a2..9d343fe4 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -59,7 +59,7 @@ export function pushManagerFactory( // For server-side we pass the segmentsSyncTask, used by SplitsUpdateWorker to fetch new segments const splitsUpdateWorker = SplitsUpdateWorker(log, storage.splits, pollingManager.splitsSyncTask, readiness.splits, telemetryTracker, userKey ? undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask); - // [Only for client-side] map of hashes to user keys, to dispatch update events to the corresponding MySegmentsUpdateWorker + // [Only for client-side] map of hashes to user keys, to dispatch membership update events to the corresponding MySegmentsUpdateWorker const userKeyHashes: Record = {}; // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers. 
// Hash64 is used to process membership update events and dispatch actions to the corresponding MySegmentsUpdateWorker. @@ -253,7 +253,7 @@ export function pushManagerFactory( forOwn(clients, ({ hash64, worker, workerLarge }, matchingKey) => { if (isInBitmap(bitmap, hash64.hex)) { - (isLS ? workerLarge : worker).put(parsedData.cn, undefined, getDelay(parsedData, matchingKey)); + (isLS ? workerLarge : worker).put(parsedData, undefined, getDelay(parsedData, matchingKey)); } }); return; @@ -277,9 +277,7 @@ export function pushManagerFactory( forOwn(clients, ({ hash64, worker, workerLarge }) => { const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; if (add !== undefined) { - (isLS ? workerLarge : worker).put(parsedData.cn, { - isLS, - cn: parsedData.cn, + (isLS ? workerLarge : worker).put(parsedData, { added: add ? [parsedData.n![0]] : [], removed: add ? [] : [parsedData.n![0]] }); @@ -294,9 +292,7 @@ export function pushManagerFactory( } forOwn(clients, ({ worker, workerLarge }) => { - (isLS ? workerLarge : worker).put(parsedData.cn, { - isLS, - cn: parsedData.cn, + (isLS ? workerLarge : worker).put(parsedData, { added: [], removed: parsedData.n! }); @@ -306,7 +302,7 @@ export function pushManagerFactory( // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases forOwn(clients, ({ worker, workerLarge }, matchingKey) => { - (isLS ? workerLarge : worker).put(parsedData.cn, undefined, getDelay(parsedData, matchingKey)); + (isLS ? workerLarge : worker).put(parsedData, undefined, getDelay(parsedData, matchingKey)); }); } From 7c5396aa286671e2efd064a961d7dd269c5001d3 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Sun, 1 Sep 2024 17:49:31 -0300 Subject: [PATCH 061/146] Add optional till param to fetchMemberships endpoint --- src/services/__tests__/splitApi.spec.ts | 4 ++-- src/services/splitApi.ts | 4 ++-- src/services/types.ts | 2 +- src/sync/polling/fetchers/mySegmentsFetcher.ts | 3 ++- src/sync/polling/fetchers/types.ts | 1 + src/sync/polling/types.ts | 2 +- src/sync/polling/updaters/mySegmentsUpdater.ts | 11 ++++++----- 7 files changed, 15 insertions(+), 12 deletions(-) diff --git a/src/services/__tests__/splitApi.spec.ts b/src/services/__tests__/splitApi.spec.ts index c0b7eb21..d935c6de 100644 --- a/src/services/__tests__/splitApi.spec.ts +++ b/src/services/__tests__/splitApi.spec.ts @@ -30,10 +30,10 @@ describe('splitApi', () => { assertHeaders(settings, headers); expect(url).toBe('auth/v2/auth?s=1.1&users=key1&users=key2'); - splitApi.fetchMemberships('userKey'); + splitApi.fetchMemberships('userKey', false, 80); [url, { headers }] = fetchMock.mock.calls[1]; assertHeaders(settings, headers); - expect(url).toBe('sdk/memberships/userKey'); + expect(url).toBe('sdk/memberships/userKey?till=80'); splitApi.fetchSegmentChanges(-1, 'segmentName', false, 90); [url, { headers }] = fetchMock.mock.calls[2]; diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index e03c83bd..232adad4 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -67,14 +67,14 @@ export function splitApiFactory( return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(SEGMENT)); }, - fetchMemberships(userMatchingKey: string, noCache?: boolean) { + fetchMemberships(userMatchingKey: string, noCache?: boolean, till?: number) { /** * URI encoding of user keys in order to: * - avoid 400 responses (due to URI malformed). E.g.: '/api/memberships/%' * - avoid 404 responses. 
E.g.: '/api/memberships/foo/bar' * - match user keys with special characters. E.g.: 'foo%bar', 'foo/bar' */ - const url = `${urls.sdk}/memberships/${encodeURIComponent(userMatchingKey)}`; + const url = `${urls.sdk}/memberships/${encodeURIComponent(userMatchingKey)}${till ? '?till=' + till : ''}`; return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MEMBERSHIPS)); }, diff --git a/src/services/types.ts b/src/services/types.ts index 96b918f3..34708f90 100644 --- a/src/services/types.ts +++ b/src/services/types.ts @@ -39,7 +39,7 @@ export type IFetchSplitChanges = (since: number, noCache?: boolean, till?: numbe export type IFetchSegmentChanges = (since: number, segmentName: string, noCache?: boolean, till?: number) => Promise -export type IFetchMemberships = (userMatchingKey: string, noCache?: boolean) => Promise +export type IFetchMemberships = (userMatchingKey: string, noCache?: boolean, till?: number) => Promise export type IPostEventsBulk = (body: string, headers?: Record) => Promise diff --git a/src/sync/polling/fetchers/mySegmentsFetcher.ts b/src/sync/polling/fetchers/mySegmentsFetcher.ts index c91bd3c3..8773a7aa 100644 --- a/src/sync/polling/fetchers/mySegmentsFetcher.ts +++ b/src/sync/polling/fetchers/mySegmentsFetcher.ts @@ -11,11 +11,12 @@ export function mySegmentsFetcherFactory(fetchMemberships: IFetchMemberships): I return function mySegmentsFetcher( userMatchingKey: string, noCache?: boolean, + till?: number, // Optional decorator for `fetchMemberships` promise, such as timeout or time tracker decorator?: (promise: Promise) => Promise ): Promise { - let mySegmentsPromise = fetchMemberships(userMatchingKey, noCache); + let mySegmentsPromise = fetchMemberships(userMatchingKey, noCache, till); if (decorator) mySegmentsPromise = decorator(mySegmentsPromise); return mySegmentsPromise.then(resp => resp.json()); diff --git a/src/sync/polling/fetchers/types.ts b/src/sync/polling/fetchers/types.ts index bf14d0ba..72968a5f 100644 --- a/src/sync/polling/fetchers/types.ts +++ b/src/sync/polling/fetchers/types.ts @@ -19,5 +19,6 @@ export type ISegmentChangesFetcher = ( export type IMySegmentsFetcher = ( userMatchingKey: string, noCache?: boolean, + till?: number, decorator?: (promise: Promise) => Promise ) => Promise diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 6611a5a8..84ce598d 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -15,7 +15,7 @@ export type MySegmentsData = { removed: string[] } -export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> { } +export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean, till?: number], boolean> { } export interface IPollingManager extends ITask { syncAll(): Promise diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index f315e5f7..31ffa513 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -9,7 +9,7 @@ import { MySegmentsData } from '../types'; import { IMembershipsResponse } from '../../../dtos/types'; import { MEMBERSHIP_LS_UPDATE } from '../../streaming/constants'; -type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => Promise +type IMySegmentsUpdater = (segmentsData?: MySegmentsData, noCache?: boolean, till?: number) => Promise /** * factory of MySegments updater, a task that: @@ -57,12 +57,12 @@ 
export function mySegmentsUpdaterFactory( } } - function _mySegmentsUpdater(retry: number, segmentsData?: MySegmentsData, noCache?: boolean): Promise { + function _mySegmentsUpdater(retry: number, segmentsData?: MySegmentsData, noCache?: boolean, till?: number): Promise { const updaterPromise: Promise = segmentsData ? // If segmentsData is provided, there is no need to fetch mySegments new Promise((res) => { updateSegments(segmentsData); res(true); }) : // If not provided, fetch mySegments - mySegmentsFetcher(matchingKey, noCache, _promiseDecorator).then(segments => { + mySegmentsFetcher(matchingKey, noCache, till, _promiseDecorator).then(segments => { // Only when we have downloaded segments completely, we should not keep retrying anymore startingUp = false; @@ -92,9 +92,10 @@ export function mySegmentsUpdaterFactory( * (2) an object with a segment name and action (true: add, or false: delete) to update the storage, * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage. * @param {boolean | undefined} noCache true to revalidate data to fetch + * @param {boolean | undefined} till query param to bypass CDN requests */ - return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean) { - return _mySegmentsUpdater(0, segmentsData, noCache); + return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean, till?: number) { + return _mySegmentsUpdater(0, segmentsData, noCache, till); }; } From 68c0c141637437e3345ec4279d8a1a63578d9234 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 2 Sep 2024 11:19:15 -0300 Subject: [PATCH 062/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 2635def7..e7720358 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.10", + "version": "1.16.1-rc.11", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.10", + "version": "1.16.1-rc.11", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index ea569a0e..6f16997b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.10", + "version": "1.16.1-rc.11", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 451d6f5aeb8cc2f3d3d248b584b1df1ee7381596 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 2 Sep 2024 14:04:54 -0300 Subject: [PATCH 063/146] Add CDN bypass logic to MySegmentsUpdateWorker --- src/storages/types.ts | 2 +- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 47 +++++++++++++++---- .../__tests__/MySegmentsUpdateWorker.spec.ts | 30 +++++++----- src/sync/streaming/pushManager.ts | 4 +- 4 files changed, 59 insertions(+), 24 deletions(-) diff --git a/src/storages/types.ts b/src/storages/types.ts index a416a783..b3b1076c 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -270,7 +270,7 @@ export interface ISegmentsCacheSync extends ISegmentsCacheBase { getRegisteredSegments(): string[] getKeysCount(): number // only used for telemetry setChangeNumber(name: string, changeNumber: number): boolean | void - getChangeNumber(name: string): number + getChangeNumber(name?: string): number resetSegments(segmentsData: MySegmentsData | IMySegmentsResponse): boolean 
// only for Sync Client-Side clear(): void } diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index c94755fa..e3011ec7 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -3,16 +3,20 @@ import { Backoff } from '../../../utils/Backoff'; import { IUpdateWorker } from './types'; import { ITelemetryTracker } from '../../../trackers/types'; import { MEMBERSHIPS } from '../../../utils/constants'; +import { ISegmentsCacheSync } from '../../../storages/types'; +import { ILogger } from '../../../logger/types'; +import { FETCH_BACKOFF_MAX_RETRIES } from './constants'; /** * MySegmentsUpdateWorker factory */ -export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[mySegmentsData?: Pick, payload?: Pick, delay?: number]> { +export function MySegmentsUpdateWorker(log: ILogger, mySegmentsCache: ISegmentsCacheSync, mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[mySegmentsData?: Pick, payload?: Pick, delay?: number]> { let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events let currentChangeNumber = -1; let handleNewEvent = false; let isHandlingEvent: boolean; + let cdnBypass: boolean; let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber let _delay: undefined | number; let _delayTimeoutID: any; @@ -20,7 +24,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, function __handleMySegmentsUpdateCall() { isHandlingEvent = true; - if (maxChangeNumber > currentChangeNumber) { + if (maxChangeNumber > Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber())) { handleNewEvent = false; const currentMaxChangeNumber = maxChangeNumber; @@ -29,21 +33,45 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, new Promise(res => { _delayTimeoutID = setTimeout(() => { _delay = undefined; - mySegmentsSyncTask.execute(_segmentsData, true).then(res); + mySegmentsSyncTask.execute(_segmentsData, true, cdnBypass ? maxChangeNumber : undefined).then(res); }, _delay); }) : - mySegmentsSyncTask.execute(_segmentsData, true); + mySegmentsSyncTask.execute(_segmentsData, true, cdnBypass ? maxChangeNumber : undefined); syncTask.then((result) => { if (!isHandlingEvent) return; // halt if `stop` has been called - if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. - if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MEMBERSHIPS); - currentChangeNumber = Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. + if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, `mySegmentsCache.getChangeNumber` can be -1, since `/memberships` change number is optional + const storageChangeNumber = mySegmentsCache.getChangeNumber(); + currentChangeNumber = storageChangeNumber > -1 ? + storageChangeNumber : + Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. 
} if (handleNewEvent) { __handleMySegmentsUpdateCall(); } else { - backoff.scheduleCall(); + if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MEMBERSHIPS); + + const attempts = backoff.attempts + 1; + + if (maxChangeNumber <= currentChangeNumber) { + log.debug(`Refresh completed${cdnBypass ? ' bypassing the CDN' : ''} in ${attempts} attempts.`); + isHandlingEvent = false; + return; + } + + if (attempts < FETCH_BACKOFF_MAX_RETRIES) { + backoff.scheduleCall(); + return; + } + + if (cdnBypass) { + log.debug(`No changes fetched after ${attempts} attempts with CDN bypassed.`); + isHandlingEvent = false; + } else { + backoff.reset(); + cdnBypass = true; + __handleMySegmentsUpdateCall(); + } } }); } else { @@ -62,10 +90,11 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, put(mySegmentsData: Pick, payload?: Pick, delay?: number) { const { type, cn } = mySegmentsData; // Ignore event if it is outdated or if there is a pending fetch request (_delay is set) - if (cn <= currentChangeNumber || cn <= maxChangeNumber || _delay) return; + if (cn <= Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber()) || cn <= maxChangeNumber || _delay) return; maxChangeNumber = cn; handleNewEvent = true; + cdnBypass = false; _segmentsData = payload && { type, cn, added: payload.added, removed: payload.removed }; _delay = delay; diff --git a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts index 26b7c55b..8581019b 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts @@ -1,5 +1,7 @@ // @ts-nocheck import { MySegmentsUpdateWorker } from '../MySegmentsUpdateWorker'; +import { MySegmentsCacheInMemory } from '../../../../storages/inMemory/MySegmentsCacheInMemory'; +import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; import { syncTaskFactory } from '../../../syncTask'; import { Backoff } from '../../../../utils/Backoff'; import { telemetryTrackerFactory } from '../../../../trackers/telemetryTracker'; @@ -43,9 +45,10 @@ describe('MySegmentsUpdateWorker', () => { test('put', async () => { // setup + const mySegmentsCache = new MySegmentsCacheInMemory(); const mySegmentsSyncTask = mySegmentsSyncTaskMock(); Backoff.__TEST__BASE_MILLIS = 1; // retry immediately - const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, mySegmentsCache, mySegmentsSyncTask, telemetryTracker); // assert calling `mySegmentsSyncTask.execute` if `isExecuting` is false expect(mySegmentsSyncTask.isExecuting()).toBe(false); @@ -73,7 +76,7 @@ describe('MySegmentsUpdateWorker', () => { mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); // doesn't synchronize MySegments while queue is empty - expect(mySegmentsSyncTask.execute.mock.calls).toEqual([[undefined, true], [undefined, true], [undefined, true]]); + expect(mySegmentsSyncTask.execute.mock.calls).toEqual([[undefined, true, undefined], [undefined, true, undefined], [undefined, true, undefined]]); // assert handling an event with segmentList after an event without segmentList, // to validate the special case than the fetch associated to the first event is resolved after a second event with payload arrives 
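A rough sketch of the retry flow this worker change introduces: when a fetch succeeds but the storage still has not reached the notification's change number, the worker keeps retrying with backoff up to a fixed number of attempts, and only then repeats the cycle with the CDN bypassed by passing the target change number as `till`; if that second cycle also fails to reach the target, it gives up. The constant value, delays, and `syncOnce` callback below are assumptions made for illustration, not the SDK's actual configuration or API.

    // Illustrative only: retry until the storage reports a change number >= target,
    // first through the CDN, then bypassing it (till = target change number).
    const FETCH_BACKOFF_MAX_RETRIES = 10; // assumed value for the sketch

    async function syncUntilChangeNumber(
      targetCn: number,
      syncOnce: (till?: number) => Promise<number>, // resolves with the change number now in storage
      baseDelayMs = 100
    ): Promise<boolean> {
      for (const bypassCdn of [false, true]) {
        for (let attempt = 1; attempt <= FETCH_BACKOFF_MAX_RETRIES; attempt++) {
          const reachedCn = await syncOnce(bypassCdn ? targetCn : undefined);
          if (reachedCn >= targetCn) return true; // refresh completed
          // exponential backoff before the next attempt
          await new Promise(res => setTimeout(res, baseDelayMs * Math.pow(2, attempt - 1)));
        }
        // plain retries exhausted: repeat the loop once more bypassing the CDN
      }
      return false; // no changes fetched even with the CDN bypassed
    }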
@@ -83,12 +86,12 @@ describe('MySegmentsUpdateWorker', () => {
     expect(mySegmentsSyncTask.isExecuting()).toBe(true);
     mySegmentUpdateWorker.put({ cn: 120 }, { removed: ['some_segment'] });
     expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true, even if payload (segmentList) is included
-    expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true);
+    expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined);

     mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success
     await new Promise(res => setTimeout(res, 10));
     expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled
-    expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 120, removed: ['some_segment'] }, true); // synchronizes MySegments with given segmentList
+    expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 120, removed: ['some_segment'] }, true, undefined); // synchronizes MySegments with given segmentList

     mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success
     await new Promise(res => setTimeout(res, 10));
@@ -97,12 +100,12 @@ describe('MySegmentsUpdateWorker', () => {
     mySegmentUpdateWorker.put({ cn: 130 }, { removed: ['other_segment'] });
     mySegmentUpdateWorker.put({ cn: 140 });
     expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments once, until event is handled
-    expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 130, removed: ['other_segment'] }, true);
+    expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 130, removed: ['other_segment'] }, true, undefined);

     mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success
     await new Promise(res => setTimeout(res));
     expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled
-    expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); // synchronizes MySegments without segmentList if the event doesn't have payload
+    expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); // synchronizes MySegments without segmentList if the event doesn't have payload

     mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success
     await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls with backoff to `execute`
@@ -112,8 +115,9 @@ describe('MySegmentsUpdateWorker', () => {
   test('put, backoff', async () => {
     // setup
     Backoff.__TEST__BASE_MILLIS = 50;
+    const mySegmentsCache = new MySegmentsCacheInMemory();
     const mySegmentsSyncTask = mySegmentsSyncTaskMock([false, false, false]); // fetch fail
-    const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker);
+    const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, mySegmentsCache, mySegmentsSyncTask, telemetryTracker);

     // while fetch fails, should retry with backoff
     mySegmentUpdateWorker.put({ cn: 100 });
@@ -127,8 +131,9 @@ describe('MySegmentsUpdateWorker', () => {

   test('stop', async () => {
     // setup
+    const mySegmentsCache = new MySegmentsCacheInMemory();
     const mySegmentsSyncTask = mySegmentsSyncTaskMock([false]);
-    const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker);
+    const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, mySegmentsCache, mySegmentsSyncTask, telemetryTracker);

     mySegmentUpdateWorker.put({ cn: 100 });
     mySegmentUpdateWorker.stop();
@@ -145,8
+150,9 @@ describe('MySegmentsUpdateWorker', () => { test('put with delay', async () => { // setup + const mySegmentsCache = new MySegmentsCacheInMemory(); const mySegmentsSyncTask = mySegmentsSyncTaskMock(); - const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, mySegmentsCache, mySegmentsSyncTask, telemetryTracker); // If a delayed fetch request is queued while another fetch request is waiting, it is discarded mySegmentUpdateWorker.put({ cn: 100 }, undefined, 50); @@ -154,7 +160,7 @@ describe('MySegmentsUpdateWorker', () => { await new Promise(res => setTimeout(res, 60)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 60)); @@ -167,12 +173,12 @@ describe('MySegmentsUpdateWorker', () => { await new Promise(res => setTimeout(res, 60)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); mySegmentUpdateWorker.put({ cn: 250 }, { added: ['some_segment'] }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 250, added: ['some_segment'] }, true); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 250, added: ['some_segment'] }, true, undefined); }); }); diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 9d343fe4..90916f7d 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -348,8 +348,8 @@ export function pushManagerFactory( userKeyHashes[hash] = userKey; clients[userKey] = { hash64: hash64(userKey), - worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker), - workerLarge: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker) + worker: MySegmentsUpdateWorker(log, storage.segments, mySegmentsSyncTask, telemetryTracker), + workerLarge: MySegmentsUpdateWorker(log, storage.largeSegments!, mySegmentsSyncTask, telemetryTracker) }; connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key From 6a296cd93f9209baa5e7317555313f55b90bd788 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 2 Sep 2024 14:05:34 -0300 Subject: [PATCH 064/146] Refactor: keep a single MySegmentsUpdateWorker per user key --- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 193 ++++++++++-------- .../__tests__/MySegmentsUpdateWorker.spec.ts | 74 +++---- src/sync/streaming/pushManager.ts | 28 +-- 3 files changed, 156 insertions(+), 139 deletions(-) diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index e3011ec7..7dc2dfbe 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -3,110 +3,129 @@ import { Backoff } from '../../../utils/Backoff'; import { IUpdateWorker } from './types'; import { ITelemetryTracker } from '../../../trackers/types'; import { MEMBERSHIPS } 
from '../../../utils/constants'; -import { ISegmentsCacheSync } from '../../../storages/types'; +import { ISegmentsCacheSync, IStorageSync } from '../../../storages/types'; import { ILogger } from '../../../logger/types'; import { FETCH_BACKOFF_MAX_RETRIES } from './constants'; +import { MEMBERSHIP_LS_UPDATE, MEMBERSHIP_MS_UPDATE } from '../constants'; /** * MySegmentsUpdateWorker factory */ -export function MySegmentsUpdateWorker(log: ILogger, mySegmentsCache: ISegmentsCacheSync, mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[mySegmentsData?: Pick, payload?: Pick, delay?: number]> { - - let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events - let currentChangeNumber = -1; - let handleNewEvent = false; - let isHandlingEvent: boolean; - let cdnBypass: boolean; - let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber - let _delay: undefined | number; - let _delayTimeoutID: any; - const backoff = new Backoff(__handleMySegmentsUpdateCall); - - function __handleMySegmentsUpdateCall() { - isHandlingEvent = true; - if (maxChangeNumber > Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber())) { - handleNewEvent = false; - const currentMaxChangeNumber = maxChangeNumber; - - // fetch mySegments revalidating data if cached - const syncTask = _delay ? - new Promise(res => { - _delayTimeoutID = setTimeout(() => { - _delay = undefined; - mySegmentsSyncTask.execute(_segmentsData, true, cdnBypass ? maxChangeNumber : undefined).then(res); - }, _delay); - }) : - mySegmentsSyncTask.execute(_segmentsData, true, cdnBypass ? maxChangeNumber : undefined); - - syncTask.then((result) => { - if (!isHandlingEvent) return; // halt if `stop` has been called - if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, `mySegmentsCache.getChangeNumber` can be -1, since `/memberships` change number is optional - const storageChangeNumber = mySegmentsCache.getChangeNumber(); - currentChangeNumber = storageChangeNumber > -1 ? - storageChangeNumber : - Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. - } - if (handleNewEvent) { - __handleMySegmentsUpdateCall(); - } else { - if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MEMBERSHIPS); - - const attempts = backoff.attempts + 1; - - if (maxChangeNumber <= currentChangeNumber) { - log.debug(`Refresh completed${cdnBypass ? 
' bypassing the CDN' : ''} in ${attempts} attempts.`); - isHandlingEvent = false; - return; - } +export function MySegmentsUpdateWorker(log: ILogger, storage: Pick, mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[mySegmentsData?: Pick, payload?: Pick, delay?: number]> { - if (attempts < FETCH_BACKOFF_MAX_RETRIES) { - backoff.scheduleCall(); - return; - } + function createUpdateWorker(mySegmentsCache: ISegmentsCacheSync) { - if (cdnBypass) { - log.debug(`No changes fetched after ${attempts} attempts with CDN bypassed.`); - isHandlingEvent = false; - } else { - backoff.reset(); - cdnBypass = true; + let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events + let currentChangeNumber = -1; + let handleNewEvent = false; + let isHandlingEvent: boolean; + let cdnBypass: boolean; + let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber + let _delay: undefined | number; + let _delayTimeoutID: any; + const backoff = new Backoff(__handleMySegmentsUpdateCall); + + function __handleMySegmentsUpdateCall() { + isHandlingEvent = true; + if (maxChangeNumber > Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber())) { + handleNewEvent = false; + const currentMaxChangeNumber = maxChangeNumber; + + // fetch mySegments revalidating data if cached + const syncTask = _delay ? + new Promise(res => { + _delayTimeoutID = setTimeout(() => { + _delay = undefined; + mySegmentsSyncTask.execute(_segmentsData, true, cdnBypass ? maxChangeNumber : undefined).then(res); + }, _delay); + }) : + mySegmentsSyncTask.execute(_segmentsData, true, cdnBypass ? maxChangeNumber : undefined); + + syncTask.then((result) => { + if (!isHandlingEvent) return; // halt if `stop` has been called + if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, `mySegmentsCache.getChangeNumber` can be -1, since `/memberships` change number is optional + const storageChangeNumber = mySegmentsCache.getChangeNumber(); + currentChangeNumber = storageChangeNumber > -1 ? + storageChangeNumber : + Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. + } + if (handleNewEvent) { __handleMySegmentsUpdateCall(); + } else { + if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MEMBERSHIPS); + + const attempts = backoff.attempts + 1; + + if (maxChangeNumber <= currentChangeNumber) { + log.debug(`Refresh completed${cdnBypass ? 
' bypassing the CDN' : ''} in ${attempts} attempts.`); + isHandlingEvent = false; + return; + } + + if (attempts < FETCH_BACKOFF_MAX_RETRIES) { + backoff.scheduleCall(); + return; + } + + if (cdnBypass) { + log.debug(`No changes fetched after ${attempts} attempts with CDN bypassed.`); + isHandlingEvent = false; + } else { + backoff.reset(); + cdnBypass = true; + __handleMySegmentsUpdateCall(); + } } - } - }); - } else { - isHandlingEvent = false; + }); + } else { + isHandlingEvent = false; + } } + + return { + /** + * Invoked by NotificationProcessor on MY_(LARGE)_SEGMENTS_UPDATE notifications + * + * @param changeNumber change number of the notification + * @param segmentsData data for KeyList or SegmentRemoval instant updates + * @param delay optional time to wait for BoundedFetchRequest or BoundedFetchRequest updates + */ + put(mySegmentsData: Pick, payload?: Pick, delay?: number) { + const { type, cn } = mySegmentsData; + // Ignore event if it is outdated or if there is a pending fetch request (_delay is set) + if (cn <= Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber()) || cn <= maxChangeNumber || _delay) return; + + maxChangeNumber = cn; + handleNewEvent = true; + cdnBypass = false; + _segmentsData = payload && { type, cn, added: payload.added, removed: payload.removed }; + _delay = delay; + + if (backoff.timeoutID || !isHandlingEvent) __handleMySegmentsUpdateCall(); + backoff.reset(); + }, + + stop() { + clearTimeout(_delayTimeoutID); + _delay = undefined; + isHandlingEvent = false; + backoff.reset(); + } + }; } + const updateWorkers = { + [MEMBERSHIP_MS_UPDATE]: createUpdateWorker(storage.segments), + [MEMBERSHIP_LS_UPDATE]: createUpdateWorker(storage.largeSegments!), + }; + return { - /** - * Invoked by NotificationProcessor on MY_(LARGE)_SEGMENTS_UPDATE notifications - * - * @param changeNumber change number of the notification - * @param segmentsData data for KeyList or SegmentRemoval instant updates - * @param delay optional time to wait for BoundedFetchRequest or BoundedFetchRequest updates - */ put(mySegmentsData: Pick, payload?: Pick, delay?: number) { - const { type, cn } = mySegmentsData; - // Ignore event if it is outdated or if there is a pending fetch request (_delay is set) - if (cn <= Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber()) || cn <= maxChangeNumber || _delay) return; - - maxChangeNumber = cn; - handleNewEvent = true; - cdnBypass = false; - _segmentsData = payload && { type, cn, added: payload.added, removed: payload.removed }; - _delay = delay; - - if (backoff.timeoutID || !isHandlingEvent) __handleMySegmentsUpdateCall(); - backoff.reset(); + updateWorkers[mySegmentsData.type].put(mySegmentsData, payload, delay); }, - stop() { - clearTimeout(_delayTimeoutID); - _delay = undefined; - isHandlingEvent = false; - backoff.reset(); + updateWorkers[MEMBERSHIP_MS_UPDATE].stop(); + updateWorkers[MEMBERSHIP_LS_UPDATE].stop(); } }; } diff --git a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts index 8581019b..c6a1a606 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts @@ -1,20 +1,28 @@ -// @ts-nocheck import { MySegmentsUpdateWorker } from '../MySegmentsUpdateWorker'; import { MySegmentsCacheInMemory } from '../../../../storages/inMemory/MySegmentsCacheInMemory'; import { loggerMock } from 
'../../../../logger/__tests__/sdkLogger.mock'; import { syncTaskFactory } from '../../../syncTask'; import { Backoff } from '../../../../utils/Backoff'; import { telemetryTrackerFactory } from '../../../../trackers/telemetryTracker'; +import { MEMBERSHIP_LS_UPDATE, MEMBERSHIP_MS_UPDATE } from '../../constants'; -function mySegmentsSyncTaskMock(values = []) { +function createStorage() { + return { + segments: new MySegmentsCacheInMemory(), + largeSegments: new MySegmentsCacheInMemory(), + }; +} + +function mySegmentsSyncTaskMock(values: Array = []) { - const __mySegmentsUpdaterCalls = []; + const __mySegmentsUpdaterCalls: Array<{ res: (value?: boolean) => void }> = []; - function __resolveMySegmentsUpdaterCall(value) { - if (__mySegmentsUpdaterCalls.length) __mySegmentsUpdaterCalls.shift().res(value); // resolve previous call + function __resolveMySegmentsUpdaterCall(value?: boolean) { + if (__mySegmentsUpdaterCalls.length) __mySegmentsUpdaterCalls.shift()!.res(value); // resolve previous call else values.push(value); } + // @ts-expect-error const syncTask = syncTaskFactory( { debug() { } }, // no-op logger () => { @@ -22,7 +30,7 @@ function mySegmentsSyncTaskMock(values = []) { __mySegmentsUpdaterCalls.push({ res }); if (values.length) __resolveMySegmentsUpdaterCall(values.shift()); }); - } + }, ); return { @@ -45,21 +53,20 @@ describe('MySegmentsUpdateWorker', () => { test('put', async () => { // setup - const mySegmentsCache = new MySegmentsCacheInMemory(); const mySegmentsSyncTask = mySegmentsSyncTaskMock(); Backoff.__TEST__BASE_MILLIS = 1; // retry immediately - const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, mySegmentsCache, mySegmentsSyncTask, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); // assert calling `mySegmentsSyncTask.execute` if `isExecuting` is false expect(mySegmentsSyncTask.isExecuting()).toBe(false); - mySegmentUpdateWorker.put({ cn: 100 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 100 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments if `isExecuting` is false // assert queueing changeNumber if `isExecuting` is true expect(mySegmentsSyncTask.isExecuting()).toBe(true); - mySegmentUpdateWorker.put({ cn: 105 }); - mySegmentUpdateWorker.put({ cn: 104 }); - mySegmentUpdateWorker.put({ cn: 106 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 105 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 104 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 106 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true // assert calling `mySegmentsSyncTask.execute` if previous call is resolved and a new changeNumber in queue @@ -82,25 +89,25 @@ describe('MySegmentsUpdateWorker', () => { // to validate the special case than the fetch associated to the first event is resolved after a second event with payload arrives mySegmentsSyncTask.execute.mockClear(); expect(mySegmentsSyncTask.isExecuting()).toBe(false); - mySegmentUpdateWorker.put({ cn: 110 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 110 }); expect(mySegmentsSyncTask.isExecuting()).toBe(true); - mySegmentUpdateWorker.put({ cn: 120 }, { removed: ['some_segment'] }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 120 }, { added: [], removed: ['some_segment'] }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // 
doesn't synchronize MySegments if `isExecuting` is true, even if payload (segmentList) is included expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 120, removed: ['some_segment'] }, true, undefined); // synchronizes MySegments with given segmentList + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIP_MS_UPDATE, cn: 120, added: [], removed: ['some_segment'] }, true, undefined); // synchronizes MySegments with given segmentList mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); // assert handling an event without segmentList after one with segmentList mySegmentsSyncTask.execute.mockClear(); - mySegmentUpdateWorker.put({ cn: 130 }, { removed: ['other_segment'] }); - mySegmentUpdateWorker.put({ cn: 140 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 130 }, { added: [], removed: ['other_segment'] }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 140 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments once, until event is handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 130, removed: ['other_segment'] }, true, undefined); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIP_MS_UPDATE, cn: 130, added: [], removed: ['other_segment'] }, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); @@ -115,33 +122,31 @@ describe('MySegmentsUpdateWorker', () => { test('put, backoff', async () => { // setup Backoff.__TEST__BASE_MILLIS = 50; - const mySegmentsCache = new MySegmentsCacheInMemory(); const mySegmentsSyncTask = mySegmentsSyncTaskMock([false, false, false]); // fetch fail - const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, mySegmentsCache, mySegmentsSyncTask, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); // while fetch fails, should retry with backoff - mySegmentUpdateWorker.put({ cn: 100 }); - await new Promise(res => setTimeout(res, Backoff.__TEST__BASE_MILLIS * 3 + 100 /* some delay */)); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 100 }); + await new Promise(res => setTimeout(res, Backoff.__TEST__BASE_MILLIS! 
* 3 + 100 /* some delay */)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); // if backoff is scheduled and a new event is queued, it must be handled immediately - mySegmentUpdateWorker.put({ cn: 105 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 105 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(4); }); test('stop', async () => { // setup - const mySegmentsCache = new MySegmentsCacheInMemory(); const mySegmentsSyncTask = mySegmentsSyncTaskMock([false]); - const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, mySegmentsCache, mySegmentsSyncTask, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); - mySegmentUpdateWorker.put({ cn: 100 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 100 }); mySegmentUpdateWorker.stop(); await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after stopping expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); - mySegmentUpdateWorker.put({ cn: 150 }, undefined, 10); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 150 }, undefined, 10); mySegmentUpdateWorker.stop(); await new Promise(res => setTimeout(res, 20)); // Wait to assert no calls to `execute` after stopping (fetch request with delay is cleared) @@ -150,13 +155,12 @@ describe('MySegmentsUpdateWorker', () => { test('put with delay', async () => { // setup - const mySegmentsCache = new MySegmentsCacheInMemory(); const mySegmentsSyncTask = mySegmentsSyncTaskMock(); - const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, mySegmentsCache, mySegmentsSyncTask, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); // If a delayed fetch request is queued while another fetch request is waiting, it is discarded - mySegmentUpdateWorker.put({ cn: 100 }, undefined, 50); - mySegmentUpdateWorker.put({ cn: 150 }, undefined, 100); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 100 }, undefined, 50); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 150 }, undefined, 100); await new Promise(res => setTimeout(res, 60)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); @@ -167,9 +171,9 @@ describe('MySegmentsUpdateWorker', () => { expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // If an event with segmentData (i.e., an instant update) is queued while a delayed fetch request is waiting, the instant update is discarded - mySegmentUpdateWorker.put({ cn: 200 }, undefined, 50); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 200 }, undefined, 50); await new Promise(res => setTimeout(res, 10)); - mySegmentUpdateWorker.put({ cn: 230 }, { added: ['some_segment'] }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 230 }, { added: ['some_segment'], removed: [] }); await new Promise(res => setTimeout(res, 60)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); @@ -177,8 +181,8 @@ describe('MySegmentsUpdateWorker', () => { mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); - mySegmentUpdateWorker.put({ cn: 250 }, { added: ['some_segment'] }); + mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 250 }, { added: ['some_segment'], removed: [] }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ cn: 250, added: 
['some_segment'] }, true, undefined); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIP_LS_UPDATE, cn: 250, added: ['some_segment'], removed: [] }, true, undefined); }); }); diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 90916f7d..ecac1509 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -63,7 +63,7 @@ export function pushManagerFactory( const userKeyHashes: Record = {}; // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers. // Hash64 is used to process membership update events and dispatch actions to the corresponding MySegmentsUpdateWorker. - const clients: Record, workerLarge: ReturnType }> = {}; + const clients: Record }> = {}; // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming. let connectForNewClient = false; @@ -170,10 +170,7 @@ export function pushManagerFactory( // cancel scheduled fetch retries of Splits, Segments, and MySegments Update Workers function stopWorkers() { splitsUpdateWorker.stop(); - if (userKey) forOwn(clients, ({ worker, workerLarge }) => { - worker.stop(); - workerLarge.stop(); - }); + if (userKey) forOwn(clients, ({ worker }) => worker.stop()); else segmentsUpdateWorker!.stop(); } @@ -239,8 +236,6 @@ export function pushManagerFactory( }); function handleMySegmentsUpdate(parsedData: IMembershipMSUpdateData | IMembershipLSUpdateData) { - const isLS = parsedData.type === MEMBERSHIP_LS_UPDATE; - switch (parsedData.u) { case UpdateStrategy.BoundedFetchRequest: { let bitmap: Uint8Array; @@ -251,9 +246,9 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ hash64, worker, workerLarge }, matchingKey) => { + forOwn(clients, ({ hash64, worker }, matchingKey) => { if (isInBitmap(bitmap, hash64.hex)) { - (isLS ? workerLarge : worker).put(parsedData, undefined, getDelay(parsedData, matchingKey)); + worker.put(parsedData, undefined, getDelay(parsedData, matchingKey)); } }); return; @@ -274,10 +269,10 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ hash64, worker, workerLarge }) => { + forOwn(clients, ({ hash64, worker }) => { const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; if (add !== undefined) { - (isLS ? workerLarge : worker).put(parsedData, { + worker.put(parsedData, { added: add ? [parsedData.n![0]] : [], removed: add ? [] : [parsedData.n![0]] }); @@ -291,8 +286,8 @@ export function pushManagerFactory( break; } - forOwn(clients, ({ worker, workerLarge }) => { - (isLS ? workerLarge : worker).put(parsedData, { + forOwn(clients, ({ worker }) => { + worker.put(parsedData, { added: [], removed: parsedData.n! }); @@ -301,8 +296,8 @@ export function pushManagerFactory( } // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases - forOwn(clients, ({ worker, workerLarge }, matchingKey) => { - (isLS ? 
workerLarge : worker).put(parsedData, undefined, getDelay(parsedData, matchingKey)); + forOwn(clients, ({ worker }, matchingKey) => { + worker.put(parsedData, undefined, getDelay(parsedData, matchingKey)); }); } @@ -348,8 +343,7 @@ export function pushManagerFactory( userKeyHashes[hash] = userKey; clients[userKey] = { hash64: hash64(userKey), - worker: MySegmentsUpdateWorker(log, storage.segments, mySegmentsSyncTask, telemetryTracker), - workerLarge: MySegmentsUpdateWorker(log, storage.largeSegments!, mySegmentsSyncTask, telemetryTracker) + worker: MySegmentsUpdateWorker(log, storage, mySegmentsSyncTask, telemetryTracker) }; connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key From eae7b3e16bf113b8df846bdc123aae95de6bfe5d Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 3 Sep 2024 15:29:14 -0300 Subject: [PATCH 065/146] Refactor notification types --- ..._UPDATE.SEGMENT_REMOVAL.1457552653000.json | 4 ++ ...IPS_LS_UPDATE.UNBOUNDED.1457552650000.json | 4 ++ ..._MS_UPDATE.BOUNDED.GZIP.1457552651000.json | 4 ++ ..._MS_UPDATE.KEYLIST.GZIP.1457552652000.json | 4 ++ ..._UPDATE.SEGMENT_REMOVAL.1457552653000.json | 4 ++ ...IPS_MS_UPDATE.UNBOUNDED.1457552650000.json | 4 ++ ..._UPDATE.SEGMENT_REMOVAL.1457552653000.json | 4 -- ...HIP_LS_UPDATE.UNBOUNDED.1457552650000.json | 4 -- ..._MS_UPDATE.BOUNDED.GZIP.1457552651000.json | 4 -- ..._MS_UPDATE.KEYLIST.GZIP.1457552652000.json | 4 -- ..._UPDATE.SEGMENT_REMOVAL.1457552653000.json | 4 -- ...HIP_MS_UPDATE.UNBOUNDED.1457552650000.json | 4 -- src/logger/constants.ts | 2 +- src/logger/messages/warn.ts | 2 +- src/sync/polling/types.ts | 4 +- .../polling/updaters/mySegmentsUpdater.ts | 4 +- .../SSEHandler/__tests__/index.spec.ts | 42 +++++++++---------- src/sync/streaming/SSEHandler/index.ts | 6 +-- src/sync/streaming/SSEHandler/types.ts | 6 +-- src/sync/streaming/constants.ts | 4 +- src/sync/streaming/pushManager.ts | 18 ++++---- src/sync/streaming/types.ts | 10 ++--- 22 files changed, 73 insertions(+), 73 deletions(-) create mode 100644 src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.1457552650000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json create mode 100644 src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json delete mode 100644 src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json delete mode 100644 src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json delete mode 100644 src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json delete mode 100644 src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json delete mode 100644 src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json delete mode 100644 src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 00000000..5a59b352 --- /dev/null +++ 
b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_LS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"employees\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.1457552650000.json new file mode 100644 index 00000000..e9b07c7a --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_LS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json new file mode 100644 index 00000000..cd1a0736 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552651000,\\\"l\\\":[],\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json new file mode 100644 index 00000000..7f553dad --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552652000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 00000000..d13e0f53 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json new file mode 100644 index 00000000..4505b73e --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json deleted file mode 
100644 index b2ebeb21..00000000 --- a/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_LS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"employees\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json deleted file mode 100644 index f5e29c7f..00000000 --- a/src/__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_LS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json deleted file mode 100644 index a2036733..00000000 --- a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_MS_UPDATE\\\",\\\"cn\\\":1457552651000,\\\"l\\\":[],\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json deleted file mode 100644 index 751d0314..00000000 --- a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_MS_UPDATE\\\",\\\"cn\\\":1457552652000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json deleted file mode 100644 index fa136a72..00000000 --- a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_MS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json deleted file mode 100644 index 81d41475..00000000 --- a/src/__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIP_MS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/logger/constants.ts b/src/logger/constants.ts index 3ffa711a..293def7e 100644 --- a/src/logger/constants.ts +++ 
b/src/logger/constants.ts @@ -79,7 +79,7 @@ export const WARN_SPLITS_FILTER_IGNORED = 219; export const WARN_SPLITS_FILTER_INVALID = 220; export const WARN_SPLITS_FILTER_EMPTY = 221; export const WARN_SDK_KEY = 222; -export const STREAMING_PARSING_MEMBERSHIP_UPDATE = 223; +export const STREAMING_PARSING_MEMBERSHIPS_UPDATE = 223; export const STREAMING_PARSING_SPLIT_UPDATE = 224; export const WARN_INVALID_FLAGSET = 225; export const WARN_LOWERCASE_FLAGSET = 226; diff --git a/src/logger/messages/warn.ts b/src/logger/messages/warn.ts index f0940901..52487f95 100644 --- a/src/logger/messages/warn.ts +++ b/src/logger/messages/warn.ts @@ -32,7 +32,7 @@ export const codesWarn: [number, string][] = codesError.concat([ [c.WARN_SPLITS_FILTER_EMPTY, c.LOG_PREFIX_SETTINGS + ': feature flag filter configuration must be a non-empty array of filter objects.'], [c.WARN_SDK_KEY, c.LOG_PREFIX_SETTINGS + ': You already have %s. We recommend keeping only one instance of the factory at all times (Singleton pattern) and reusing it throughout your application'], - [c.STREAMING_PARSING_MEMBERSHIP_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching Memberships due to an error processing %s notification: %s'], + [c.STREAMING_PARSING_MEMBERSHIPS_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching Memberships due to an error processing %s notification: %s'], [c.STREAMING_PARSING_SPLIT_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching SplitChanges due to an error processing SPLIT_UPDATE notification: %s'], [c.WARN_INVALID_FLAGSET, '%s: you passed %s, flag set must adhere to the regular expressions %s. This means a flag set must start with a letter or number, be in lowercase, alphanumeric and have a max length of 50 characters. %s was discarded.'], [c.WARN_LOWERCASE_FLAGSET, '%s: flag set %s should be all lowercase - converting string to lowercase.'], diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 6611a5a8..f7d1c842 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -1,7 +1,7 @@ import { ISplit } from '../../dtos/types'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; -import { MEMBERSHIP_LS_UPDATE, MEMBERSHIP_MS_UPDATE } from '../streaming/types'; +import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../streaming/types'; import { ITask, ISyncTask } from '../types'; export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit, changeNumber: number }], boolean> { } @@ -9,7 +9,7 @@ export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: nu export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { } export type MySegmentsData = { - type: MEMBERSHIP_MS_UPDATE | MEMBERSHIP_LS_UPDATE + type: MEMBERSHIPS_MS_UPDATE | MEMBERSHIPS_LS_UPDATE cn: number added: string[] removed: string[] diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index f315e5f7..176fd6ee 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -7,7 +7,7 @@ import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; import { IMembershipsResponse } from '../../../dtos/types'; -import { MEMBERSHIP_LS_UPDATE } from '../../streaming/constants'; +import { 
MEMBERSHIPS_LS_UPDATE } from '../../streaming/constants'; type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => Promise @@ -42,7 +42,7 @@ export function mySegmentsUpdaterFactory( let shouldNotifyUpdate; if ((segmentsData as MySegmentsData).type !== undefined) { - shouldNotifyUpdate = (segmentsData as MySegmentsData).type === MEMBERSHIP_LS_UPDATE ? + shouldNotifyUpdate = (segmentsData as MySegmentsData).type === MEMBERSHIPS_LS_UPDATE ? largeSegments!.resetSegments(segmentsData as MySegmentsData) : segments.resetSegments(segmentsData as MySegmentsData); } else { diff --git a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts index ea3eac8e..e85b22d8 100644 --- a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts +++ b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts @@ -1,6 +1,6 @@ // @ts-nocheck import { SSEHandlerFactory } from '..'; -import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MEMBERSHIP_MS_UPDATE, MEMBERSHIP_LS_UPDATE, ControlType } from '../../constants'; +import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MEMBERSHIPS_MS_UPDATE, MEMBERSHIPS_LS_UPDATE, ControlType } from '../../constants'; import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; // update messages @@ -8,16 +8,16 @@ import splitUpdateMessage from '../../../../__tests__/mocks/message.SPLIT_UPDATE import splitKillMessage from '../../../../__tests__/mocks/message.SPLIT_KILL.1457552650000.json'; import segmentUpdateMessage from '../../../../__tests__/mocks/message.SEGMENT_UPDATE.1457552640000.json'; -// update messages MEMBERSHIP_MS_UPDATE -import unboundedMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.UNBOUNDED.1457552650000.json'; -import boundedGzipMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.BOUNDED.GZIP.1457552651000.json'; -import keylistGzipMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.KEYLIST.GZIP.1457552652000.json'; -import segmentRemovalMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; +// update messages MEMBERSHIPS_MS_UPDATE +import unboundedMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json'; +import boundedGzipMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json'; +import keylistGzipMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json'; +import segmentRemovalMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; import { keylists, bitmaps } from '../../__tests__/dataMocks'; -// update messages MEMBERSHIP_LS_UPDATE -import largeSegmentUnboundedMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.UNBOUNDED.1457552650000.json'; -import largeSegmentRemovalMessage from '../../../../__tests__/mocks/message.MEMBERSHIP_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; +// update messages MEMBERSHIPS_LS_UPDATE +import largeSegmentUnboundedMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.1457552650000.json'; +import largeSegmentRemovalMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; // occupancy 
messages import occupancy1ControlPri from '../../../../__tests__/mocks/message.OCCUPANCY.1.control_pri.1586987434450.json'; @@ -152,29 +152,29 @@ test('`handlerMessage` for update notifications (NotificationProcessor) and stre sseHandler.handleMessage(segmentUpdateMessage); expect(pushEmitter.emit).toHaveBeenLastCalledWith(SEGMENT_UPDATE, ...expectedParams); // must emit SEGMENT_UPDATE with the message change number and segment name - expectedParams = [{ type: 'MEMBERSHIP_MS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [] }]; + expectedParams = [{ type: 'MEMBERSHIPS_MS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [] }]; sseHandler.handleMessage(unboundedMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_MS_UPDATE with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_MS_UPDATE with the message parsed data - expectedParams = [{ type: 'MEMBERSHIP_MS_UPDATE', cn: 1457552651000, c: 1, d: bitmaps[0].bitmapDataCompressed, u: 1, l: [] }]; + expectedParams = [{ type: 'MEMBERSHIPS_MS_UPDATE', cn: 1457552651000, c: 1, d: bitmaps[0].bitmapDataCompressed, u: 1, l: [] }]; sseHandler.handleMessage(boundedGzipMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_MS_UPDATE with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_MS_UPDATE with the message parsed data - expectedParams = [{ type: 'MEMBERSHIP_MS_UPDATE', cn: 1457552652000, c: 1, d: keylists[0].keyListDataCompressed, u: 2, l: ['splitters'] }]; + expectedParams = [{ type: 'MEMBERSHIPS_MS_UPDATE', cn: 1457552652000, c: 1, d: keylists[0].keyListDataCompressed, u: 2, l: ['splitters'] }]; sseHandler.handleMessage(keylistGzipMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_MS_UPDATE with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_MS_UPDATE with the message parsed data - expectedParams = [{ type: 'MEMBERSHIP_MS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 3, l: ['splitters'] }]; + expectedParams = [{ type: 'MEMBERSHIPS_MS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 3, l: ['splitters'] }]; sseHandler.handleMessage(segmentRemovalMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_MS_UPDATE with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_MS_UPDATE with the message parsed data - expectedParams = [{ type: 'MEMBERSHIP_LS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [], i: 300, h: 1, s: 0 }]; + expectedParams = [{ type: 'MEMBERSHIPS_LS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [], i: 300, h: 1, s: 0 }]; sseHandler.handleMessage(largeSegmentUnboundedMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_LS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_LS_UPDATE with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_LS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_LS_UPDATE with the message parsed data - expectedParams = [{ type: 'MEMBERSHIP_LS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 
3, l: ['employees'] }]; + expectedParams = [{ type: 'MEMBERSHIPS_LS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 3, l: ['employees'] }]; sseHandler.handleMessage(largeSegmentRemovalMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIP_LS_UPDATE, ...expectedParams); // must emit MEMBERSHIP_LS_UPDATE with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_LS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_LS_UPDATE with the message parsed data sseHandler.handleMessage(streamingReset); expect(pushEmitter.emit).toHaveBeenLastCalledWith(ControlType.STREAMING_RESET); // must emit STREAMING_RESET diff --git a/src/sync/streaming/SSEHandler/index.ts b/src/sync/streaming/SSEHandler/index.ts index ea2d63b8..bbe39d0f 100644 --- a/src/sync/streaming/SSEHandler/index.ts +++ b/src/sync/streaming/SSEHandler/index.ts @@ -1,6 +1,6 @@ import { errorParser, messageParser } from './NotificationParser'; import { notificationKeeperFactory } from './NotificationKeeper'; -import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MEMBERSHIP_MS_UPDATE, MEMBERSHIP_LS_UPDATE } from '../constants'; +import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MEMBERSHIPS_MS_UPDATE, MEMBERSHIPS_LS_UPDATE } from '../constants'; import { IPushEventEmitter } from '../types'; import { ISseEventHandler } from '../SSEClient/types'; import { INotificationError, INotificationMessage } from './types'; @@ -81,8 +81,8 @@ export function SSEHandlerFactory(log: ILogger, pushEmitter: IPushEventEmitter, /* update events */ case SPLIT_UPDATE: case SEGMENT_UPDATE: - case MEMBERSHIP_MS_UPDATE: - case MEMBERSHIP_LS_UPDATE: + case MEMBERSHIPS_MS_UPDATE: + case MEMBERSHIPS_LS_UPDATE: case SPLIT_KILL: pushEmitter.emit(parsedData.type, parsedData); break; diff --git a/src/sync/streaming/SSEHandler/types.ts b/src/sync/streaming/SSEHandler/types.ts index 240dd393..192583c3 100644 --- a/src/sync/streaming/SSEHandler/types.ts +++ b/src/sync/streaming/SSEHandler/types.ts @@ -1,5 +1,5 @@ import { ControlType } from '../constants'; -import { SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MEMBERSHIP_LS_UPDATE, MEMBERSHIP_MS_UPDATE } from '../types'; +import { SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../types'; export enum Compression { None = 0, @@ -31,9 +31,9 @@ interface IMembershipUpdateData { s?: number, // seed for hash function } -export interface IMembershipMSUpdateData extends IMembershipUpdateData { } +export interface IMembershipMSUpdateData extends IMembershipUpdateData { } -export interface IMembershipLSUpdateData extends IMembershipUpdateData { } +export interface IMembershipLSUpdateData extends IMembershipUpdateData { } export interface ISegmentUpdateData { type: SEGMENT_UPDATE, diff --git a/src/sync/streaming/constants.ts b/src/sync/streaming/constants.ts index 27eb4b02..ed958ee7 100644 --- a/src/sync/streaming/constants.ts +++ b/src/sync/streaming/constants.ts @@ -25,8 +25,8 @@ export const PUSH_SUBSYSTEM_UP = 'PUSH_SUBSYSTEM_UP'; export const PUSH_SUBSYSTEM_DOWN = 'PUSH_SUBSYSTEM_DOWN'; // Update-type push notifications, handled by NotificationProcessor -export const MEMBERSHIP_MS_UPDATE = 'MEMBERSHIP_MS_UPDATE'; -export const MEMBERSHIP_LS_UPDATE = 'MEMBERSHIP_LS_UPDATE'; +export const MEMBERSHIPS_MS_UPDATE = 'MEMBERSHIPS_MS_UPDATE'; +export const MEMBERSHIPS_LS_UPDATE = 
'MEMBERSHIPS_LS_UPDATE'; export const SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export const SPLIT_KILL = 'SPLIT_KILL'; export const SPLIT_UPDATE = 'SPLIT_UPDATE'; diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 9d343fe4..ada637c0 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -11,8 +11,8 @@ import { authenticateFactory, hashUserKey } from './AuthClient'; import { forOwn } from '../../utils/lang'; import { SSEClient } from './SSEClient'; import { getMatching } from '../../utils/key'; -import { MEMBERSHIP_MS_UPDATE, MEMBERSHIP_LS_UPDATE, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType } from './constants'; -import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MEMBERSHIP_UPDATE, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; +import { MEMBERSHIPS_MS_UPDATE, MEMBERSHIPS_LS_UPDATE, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType } from './constants'; +import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MEMBERSHIPS_UPDATE, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; import { IMembershipMSUpdateData, IMembershipLSUpdateData, KeyList, UpdateStrategy } from './SSEHandler/types'; import { getDelay, isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; import { ISet, _Set } from '../../utils/lang/sets'; @@ -239,7 +239,7 @@ export function pushManagerFactory( }); function handleMySegmentsUpdate(parsedData: IMembershipMSUpdateData | IMembershipLSUpdateData) { - const isLS = parsedData.type === MEMBERSHIP_LS_UPDATE; + const isLS = parsedData.type === MEMBERSHIPS_LS_UPDATE; switch (parsedData.u) { case UpdateStrategy.BoundedFetchRequest: { @@ -247,7 +247,7 @@ export function pushManagerFactory( try { bitmap = parseBitmap(parsedData.d!, parsedData.c!); } catch (e) { - log.warn(STREAMING_PARSING_MEMBERSHIP_UPDATE, ['BoundedFetchRequest', e]); + log.warn(STREAMING_PARSING_MEMBERSHIPS_UPDATE, ['BoundedFetchRequest', e]); break; } @@ -265,12 +265,12 @@ export function pushManagerFactory( added = new _Set(keyList.a); removed = new _Set(keyList.r); } catch (e) { - log.warn(STREAMING_PARSING_MEMBERSHIP_UPDATE, ['KeyList', e]); + log.warn(STREAMING_PARSING_MEMBERSHIPS_UPDATE, ['KeyList', e]); break; } if (!parsedData.n || !parsedData.n.length) { - log.warn(STREAMING_PARSING_MEMBERSHIP_UPDATE, ['KeyList', 'No segment name was provided']); + log.warn(STREAMING_PARSING_MEMBERSHIPS_UPDATE, ['KeyList', 'No segment name was provided']); break; } @@ -287,7 +287,7 @@ export function pushManagerFactory( } case UpdateStrategy.SegmentRemoval: if (!parsedData.n || !parsedData.n.length) { - log.warn(STREAMING_PARSING_MEMBERSHIP_UPDATE, ['SegmentRemoval', 'No segment name was provided']); + log.warn(STREAMING_PARSING_MEMBERSHIPS_UPDATE, ['SegmentRemoval', 'No segment name was provided']); break; } @@ -307,8 +307,8 @@ export function pushManagerFactory( } if (userKey) { - pushEmitter.on(MEMBERSHIP_MS_UPDATE, handleMySegmentsUpdate); - pushEmitter.on(MEMBERSHIP_LS_UPDATE, handleMySegmentsUpdate); + 
pushEmitter.on(MEMBERSHIPS_MS_UPDATE, handleMySegmentsUpdate); + pushEmitter.on(MEMBERSHIPS_LS_UPDATE, handleMySegmentsUpdate); } else { pushEmitter.on(SEGMENT_UPDATE, segmentsUpdateWorker!.put); } diff --git a/src/sync/streaming/types.ts b/src/sync/streaming/types.ts index be76c578..0684c099 100644 --- a/src/sync/streaming/types.ts +++ b/src/sync/streaming/types.ts @@ -11,8 +11,8 @@ export type PUSH_NONRETRYABLE_ERROR = 'PUSH_NONRETRYABLE_ERROR' export type PUSH_RETRYABLE_ERROR = 'PUSH_RETRYABLE_ERROR' // Update-type push notifications, handled by NotificationProcessor -export type MEMBERSHIP_MS_UPDATE = 'MEMBERSHIP_MS_UPDATE'; -export type MEMBERSHIP_LS_UPDATE = 'MEMBERSHIP_LS_UPDATE'; +export type MEMBERSHIPS_MS_UPDATE = 'MEMBERSHIPS_MS_UPDATE'; +export type MEMBERSHIPS_LS_UPDATE = 'MEMBERSHIPS_LS_UPDATE'; export type SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export type SPLIT_KILL = 'SPLIT_KILL'; export type SPLIT_UPDATE = 'SPLIT_UPDATE'; @@ -21,11 +21,11 @@ export type SPLIT_UPDATE = 'SPLIT_UPDATE'; export type CONTROL = 'CONTROL'; export type OCCUPANCY = 'OCCUPANCY'; -export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MEMBERSHIP_MS_UPDATE | MEMBERSHIP_LS_UPDATE | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | ControlType.STREAMING_RESET +export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MEMBERSHIPS_MS_UPDATE | MEMBERSHIPS_LS_UPDATE | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | ControlType.STREAMING_RESET type IParsedData = - T extends MEMBERSHIP_MS_UPDATE ? IMembershipMSUpdateData : - T extends MEMBERSHIP_LS_UPDATE ? IMembershipLSUpdateData : + T extends MEMBERSHIPS_MS_UPDATE ? IMembershipMSUpdateData : + T extends MEMBERSHIPS_LS_UPDATE ? IMembershipLSUpdateData : T extends SEGMENT_UPDATE ? ISegmentUpdateData : T extends SPLIT_UPDATE ? ISplitUpdateData : T extends SPLIT_KILL ? 
ISplitKillData : INotificationData; From 907950b4cf49e62890a220178a050dd8788953f0 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 3 Sep 2024 15:46:36 -0300 Subject: [PATCH 066/146] Refactor notification types --- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 10 ++--- .../__tests__/MySegmentsUpdateWorker.spec.ts | 42 +++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index 7dc2dfbe..1a478a1c 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -6,7 +6,7 @@ import { MEMBERSHIPS } from '../../../utils/constants'; import { ISegmentsCacheSync, IStorageSync } from '../../../storages/types'; import { ILogger } from '../../../logger/types'; import { FETCH_BACKOFF_MAX_RETRIES } from './constants'; -import { MEMBERSHIP_LS_UPDATE, MEMBERSHIP_MS_UPDATE } from '../constants'; +import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../constants'; /** * MySegmentsUpdateWorker factory @@ -115,8 +115,8 @@ export function MySegmentsUpdateWorker(log: ILogger, storage: Pick { // assert calling `mySegmentsSyncTask.execute` if `isExecuting` is false expect(mySegmentsSyncTask.isExecuting()).toBe(false); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 100 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 100 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments if `isExecuting` is false // assert queueing changeNumber if `isExecuting` is true expect(mySegmentsSyncTask.isExecuting()).toBe(true); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 105 }); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 104 }); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 106 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 105 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 104 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 106 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true // assert calling `mySegmentsSyncTask.execute` if previous call is resolved and a new changeNumber in queue @@ -89,25 +89,25 @@ describe('MySegmentsUpdateWorker', () => { // to validate the special case than the fetch associated to the first event is resolved after a second event with payload arrives mySegmentsSyncTask.execute.mockClear(); expect(mySegmentsSyncTask.isExecuting()).toBe(false); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 110 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 110 }); expect(mySegmentsSyncTask.isExecuting()).toBe(true); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 120 }, { added: [], removed: ['some_segment'] }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 120 }, { added: [], removed: ['some_segment'] }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true, even if payload (segmentList) is included expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled - 
expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIP_MS_UPDATE, cn: 120, added: [], removed: ['some_segment'] }, true, undefined); // synchronizes MySegments with given segmentList + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIPS_MS_UPDATE, cn: 120, added: [], removed: ['some_segment'] }, true, undefined); // synchronizes MySegments with given segmentList mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); // assert handling an event without segmentList after one with segmentList mySegmentsSyncTask.execute.mockClear(); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 130 }, { added: [], removed: ['other_segment'] }); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 140 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 130 }, { added: [], removed: ['other_segment'] }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 140 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments once, until event is handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIP_MS_UPDATE, cn: 130, added: [], removed: ['other_segment'] }, true, undefined); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIPS_MS_UPDATE, cn: 130, added: [], removed: ['other_segment'] }, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); @@ -126,12 +126,12 @@ describe('MySegmentsUpdateWorker', () => { const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); // while fetch fails, should retry with backoff - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 100 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 100 }); await new Promise(res => setTimeout(res, Backoff.__TEST__BASE_MILLIS! 
* 3 + 100 /* some delay */)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); // if backoff is scheduled and a new event is queued, it must be handled immediately - mySegmentUpdateWorker.put({ type: MEMBERSHIP_MS_UPDATE, cn: 105 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 105 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(4); }); @@ -140,13 +140,13 @@ describe('MySegmentsUpdateWorker', () => { const mySegmentsSyncTask = mySegmentsSyncTaskMock([false]); const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 100 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 100 }); mySegmentUpdateWorker.stop(); await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after stopping expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 150 }, undefined, 10); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 150 }, undefined, 10); mySegmentUpdateWorker.stop(); await new Promise(res => setTimeout(res, 20)); // Wait to assert no calls to `execute` after stopping (fetch request with delay is cleared) @@ -159,8 +159,8 @@ describe('MySegmentsUpdateWorker', () => { const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); // If a delayed fetch request is queued while another fetch request is waiting, it is discarded - mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 100 }, undefined, 50); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 150 }, undefined, 100); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 100 }, undefined, 50); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 150 }, undefined, 100); await new Promise(res => setTimeout(res, 60)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); @@ -171,9 +171,9 @@ describe('MySegmentsUpdateWorker', () => { expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // If an event with segmentData (i.e., an instant update) is queued while a delayed fetch request is waiting, the instant update is discarded - mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 200 }, undefined, 50); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 200 }, undefined, 50); await new Promise(res => setTimeout(res, 10)); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 230 }, { added: ['some_segment'], removed: [] }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 230 }, { added: ['some_segment'], removed: [] }); await new Promise(res => setTimeout(res, 60)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); @@ -181,8 +181,8 @@ describe('MySegmentsUpdateWorker', () => { mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); - mySegmentUpdateWorker.put({ type: MEMBERSHIP_LS_UPDATE, cn: 250 }, { added: ['some_segment'], removed: [] }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 250 }, { added: ['some_segment'], removed: [] }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIP_LS_UPDATE, cn: 250, added: ['some_segment'], removed: [] }, true, undefined); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIPS_LS_UPDATE, cn: 250, 
added: ['some_segment'], removed: [] }, true, undefined); }); }); From 47040f0901f527ad672c391e012d9615cd5def97 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 3 Sep 2024 17:41:31 -0300 Subject: [PATCH 067/146] Refactor telemetry largeSegmentCount --- src/storages/inMemory/TelemetryCacheInMemory.ts | 2 +- src/sync/submitters/types.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/storages/inMemory/TelemetryCacheInMemory.ts b/src/storages/inMemory/TelemetryCacheInMemory.ts index ddb57086..7e7e3f98 100644 --- a/src/storages/inMemory/TelemetryCacheInMemory.ts +++ b/src/storages/inMemory/TelemetryCacheInMemory.ts @@ -51,7 +51,7 @@ export class TelemetryCacheInMemory implements ITelemetryCacheSync { spC: this.splits && this.splits.getSplitNames().length, seC: this.segments && this.segments.getRegisteredSegments().length, skC: this.segments && this.segments.getKeysCount(), - lseC: this.largeSegments && this.largeSegments.getRegisteredSegments().length, + lsC: this.largeSegments && this.largeSegments.getRegisteredSegments().length, lskC: this.largeSegments && this.largeSegments.getKeysCount(), sL: this.getSessionLength(), eQ: this.getEventStats(QUEUED), diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index bd757b85..8aa61c2b 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -176,7 +176,7 @@ export type TelemetryUsageStatsPayload = TelemetryUsageStats & { spC?: number, // splitCount seC?: number, // segmentCount skC?: number, // segmentKeyCount - lseC?: number, // largeSegmentCount + lsC?: number, // largeSegmentCount lskC?: number, // largeSegmentKeyCount sL?: number, // sessionLengthMs eQ: number, // eventsQueued From 2d49c017e8c50733d04e57f0d2866f7a5a481453 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 3 Sep 2024 17:56:54 -0300 Subject: [PATCH 068/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index e7720358..b4f7de18 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.11", + "version": "1.16.1-rc.12", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.11", + "version": "1.16.1-rc.12", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 6f16997b..a68c87af 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.11", + "version": "1.16.1-rc.12", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 8e0a1e356fd86b2aab9b8123f34198d916e4777c Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 4 Sep 2024 18:46:00 -0300 Subject: [PATCH 069/146] Adding some corner cases to unit tests --- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 12 +++-- .../__tests__/MySegmentsUpdateWorker.spec.ts | 54 ++++++++++++++----- 2 files changed, 47 insertions(+), 19 deletions(-) diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index 1a478a1c..ae44e7c2 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -13,6 +13,9 @@ import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from 
'../constants'; */ export function MySegmentsUpdateWorker(log: ILogger, storage: Pick, mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[mySegmentsData?: Pick, payload?: Pick, delay?: number]> { + let _delay: undefined | number; + let _delayTimeoutID: any; + function createUpdateWorker(mySegmentsCache: ISegmentsCacheSync) { let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events @@ -21,8 +24,6 @@ export function MySegmentsUpdateWorker(log: ILogger, storage: Pick, payload?: Pick, delay?: number) { const { type, cn } = mySegmentsData; - // Ignore event if it is outdated or if there is a pending fetch request (_delay is set) - if (cn <= Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber()) || cn <= maxChangeNumber || _delay) return; - + // Discard event if it is outdated or there is a pending fetch request (_delay is set), but update target change number + if (cn <= Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber()) || cn <= maxChangeNumber) return; maxChangeNumber = cn; + if (_delay) return; + handleNewEvent = true; cdnBypass = false; _segmentsData = payload && { type, cn, added: payload.added, removed: payload.removed }; diff --git a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts index 4677f555..0ba329bb 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts @@ -1,15 +1,24 @@ import { MySegmentsUpdateWorker } from '../MySegmentsUpdateWorker'; -import { MySegmentsCacheInMemory } from '../../../../storages/inMemory/MySegmentsCacheInMemory'; import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; import { syncTaskFactory } from '../../../syncTask'; import { Backoff } from '../../../../utils/Backoff'; import { telemetryTrackerFactory } from '../../../../trackers/telemetryTracker'; import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../../constants'; -function createStorage() { +function storageMock() { return { - segments: new MySegmentsCacheInMemory(), - largeSegments: new MySegmentsCacheInMemory(), + segments: { + _changeNumber: -1, + getChangeNumber() { + return this._changeNumber; + } + }, + largeSegments: { + _changeNumber: -1, + getChangeNumber() { + return this._changeNumber; + } + }, }; } @@ -55,7 +64,7 @@ describe('MySegmentsUpdateWorker', () => { // setup const mySegmentsSyncTask = mySegmentsSyncTaskMock(); Backoff.__TEST__BASE_MILLIS = 1; // retry immediately - const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, storageMock() as any, mySegmentsSyncTask as any, telemetryTracker); // assert calling `mySegmentsSyncTask.execute` if `isExecuting` is false expect(mySegmentsSyncTask.isExecuting()).toBe(false); @@ -123,7 +132,7 @@ describe('MySegmentsUpdateWorker', () => { // setup Backoff.__TEST__BASE_MILLIS = 50; const mySegmentsSyncTask = mySegmentsSyncTaskMock([false, false, false]); // fetch fail - const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, storageMock() as any, mySegmentsSyncTask as any, telemetryTracker); // while fetch fails, should retry with backoff 
mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 100 }); @@ -138,7 +147,7 @@ describe('MySegmentsUpdateWorker', () => { test('stop', async () => { // setup const mySegmentsSyncTask = mySegmentsSyncTaskMock([false]); - const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, storageMock() as any, mySegmentsSyncTask as any, telemetryTracker); mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 100 }); mySegmentUpdateWorker.stop(); @@ -153,22 +162,33 @@ describe('MySegmentsUpdateWorker', () => { expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); }); - test('put with delay', async () => { + test('put, with delay and storage change number', async () => { // setup + Backoff.__TEST__BASE_MILLIS = 1; // retry immediately const mySegmentsSyncTask = mySegmentsSyncTaskMock(); - const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, createStorage(), mySegmentsSyncTask as any, telemetryTracker); + const storage = storageMock(); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, storage as any, mySegmentsSyncTask as any, telemetryTracker); - // If a delayed fetch request is queued while another fetch request is waiting, it is discarded + // notification with delay mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 100 }, undefined, 50); - mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 150 }, undefined, 100); + + // If a notification is queued while a fetch request is waiting, the notification is discarded but its change number is used to update the target change number + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 150 }, undefined, 100); // target for segments storage is 150 + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 120 }); // target for large segments storage is 120 await new Promise(res => setTimeout(res, 60)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); + storage.largeSegments._changeNumber = 100; // change number update but not the expected one mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 60)); - expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // fetch retry due to target change number mismatch + + storage.largeSegments._changeNumber = 120; + mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); + await new Promise(res => setTimeout(res, 60)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // no more fetches since target change number is reached // If an event with segmentData (i.e., an instant update) is queued while a delayed fetch request is waiting, the instant update is discarded mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 200 }, undefined, 50); @@ -176,13 +196,19 @@ describe('MySegmentsUpdateWorker', () => { mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 230 }, { added: ['some_segment'], removed: [] }); await new Promise(res => setTimeout(res, 60)); - expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); 
mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 250 }, { added: ['some_segment'], removed: [] }); - expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(4); expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIPS_LS_UPDATE, cn: 250, added: ['some_segment'], removed: [] }, true, undefined); + + // Stop should clear the delayed fetch request + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 300 }, undefined, 10); + mySegmentUpdateWorker.stop(); + await new Promise(res => setTimeout(res, 20)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(4); }); }); From 439eb3d7c0e360037e100832fa1606c9deaf5893 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 9 Sep 2024 16:30:33 -0300 Subject: [PATCH 070/146] getDelay unit tests --- src/sync/streaming/__tests__/parseUtils.spec.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/sync/streaming/__tests__/parseUtils.spec.ts b/src/sync/streaming/__tests__/parseUtils.spec.ts index ee1402bb..a1501917 100644 --- a/src/sync/streaming/__tests__/parseUtils.spec.ts +++ b/src/sync/streaming/__tests__/parseUtils.spec.ts @@ -62,8 +62,14 @@ test('split notification - parseKeyList', () => { }); test('getDelay', () => { + // if h === 0, return 0 (immediate, no delay) + expect(getDelay({ i: 300, h: 0, s: 1 }, 'anything')).toBe(0); + + // if h !== 0, calculate delay with provided hash, seed and interval expect(getDelay({ i: 300, h: 1, s: 0 }, 'nicolas@split.io')).toBe(241); expect(getDelay({ i: 60000, h: 1, s: 1 }, 'emi@split.io')).toBe(14389); expect(getDelay({ i: 60000, h: 1, s: 0 }, 'emi@split.io')).toBe(24593); + + // if i, h and s are not provided, use defaults expect(getDelay({}, 'emi@split.io')).toBe(24593); }); From 1921de941f3be464f6ee751de5b2520ad8390e59 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 9 Sep 2024 16:58:42 -0300 Subject: [PATCH 071/146] Update changelog entry --- CHANGES.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGES.txt b/CHANGES.txt index 1631862d..ba4fc466 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,6 +1,8 @@ 2.0.0 (September XX, 2024) + - Added support for targeting rules based on large segments. - BREAKING CHANGES: - Removed `/mySegments` endpoint from SplitAPI module, as it is replaced by `/memberships` endpoint. + - Removed support for MY_SEGMENTS_UPDATE and MY_SEGMENTS_UPDATE_V2 notification types, as they are replaced by MEMBERSHIPS_MS_UPDATE and MEMBERSHIPS_LS_UPDATE notification types. 1.17.0 (September 6, 2024) - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. 
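For context on the corner cases exercised in [PATCH 069/146] above: `put` now discards a notification that is outdated or that arrives while a delayed fetch is pending, but it still raises the target change number, so the pending fetch keeps retrying until the storage reaches that target. Below is a minimal, self-contained TypeScript sketch of that gating logic only; the names `shouldFetch` and `WorkerState`, and the single `storageChangeNumber` argument (the worker actually takes the max of its in-memory change number and the cache's), are illustrative assumptions, not the SDK's actual implementation.

// Standalone sketch of the change-number gating described in [PATCH 069/146] (illustrative only).
type WorkerState = { maxChangeNumber: number, delayPending: boolean };

function shouldFetch(state: WorkerState, storageChangeNumber: number, cn: number): boolean {
  // Outdated notification: discard without touching the target change number.
  if (cn <= storageChangeNumber || cn <= state.maxChangeNumber) return false;
  // Newer notification: raise the target change number even if the fetch itself is deferred.
  state.maxChangeNumber = cn;
  // A pending delayed fetch absorbs the notification; no additional fetch is scheduled.
  if (state.delayPending) return false;
  return true;
}

const state: WorkerState = { maxChangeNumber: 0, delayPending: true };
shouldFetch(state, 100, 90);  // false: cn 90 is not ahead of the storage change number
shouldFetch(state, 100, 150); // false: a delayed fetch is pending, but the target is now 150
state.maxChangeNumber;        // 150: the pending fetch retries (with backoff) until the storage reaches this target
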
From d55fb787ffe7f64b134b1ef55cfce0d7cb9bce97 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 16 Sep 2024 17:40:26 -0300 Subject: [PATCH 072/146] Refactor IN_LARGE_SEGMENT matcher --- package-lock.json | 246 +++++---------------- package.json | 4 +- src/dtos/types.ts | 7 +- src/evaluator/matchersTransform/index.ts | 7 +- src/evaluator/matchersTransform/segment.ts | 8 +- 5 files changed, 75 insertions(+), 197 deletions(-) diff --git a/package-lock.json b/package-lock.json index 9b433d2f..1e665ab3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.12", + "version": "1.17.1-rc.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.12", + "version": "1.17.1-rc.0", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" @@ -22,7 +22,7 @@ "eslint": "^8.48.0", "eslint-plugin-compat": "^4.2.0", "eslint-plugin-import": "^2.25.3", - "fetch-mock": "^9.11.0", + "fetch-mock": "^11.1.3", "ioredis": "^4.28.0", "jest": "^27.2.3", "jest-localstorage-mock": "^2.4.3", @@ -492,18 +492,6 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/runtime": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.2.tgz", - "integrity": "sha512-hzeyJyMA1YGdJTuWU0e/j4wKXrU4OMFvY2MSlaI9B7VQb0r5cxTE3EAIS2Q7Tn2RIcDkRvTA/v2JsAEhxe99uw==", - "dev": true, - "dependencies": { - "regenerator-runtime": "^0.13.4" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/template": { "version": "7.22.15", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", @@ -1471,6 +1459,12 @@ "@babel/types": "^7.3.0" } }, + "node_modules/@types/glob-to-regexp": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/@types/glob-to-regexp/-/glob-to-regexp-0.4.4.tgz", + "integrity": "sha512-nDKoaKJYbnn1MZxUY0cA1bPmmgZbg0cTq7Rh13d0KWYNOiKbqoR+2d89SnRPszGh7ROzSwZ/GOjZ4jPbmmZ6Eg==", + "dev": true + }, "node_modules/@types/google.analytics": { "version": "0.0.40", "resolved": "https://registry.npmjs.org/@types/google.analytics/-/google.analytics-0.0.40.tgz", @@ -2459,18 +2453,6 @@ "safe-buffer": "~5.1.1" } }, - "node_modules/core-js": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", - "integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==", - "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. 
Please, upgrade your dependencies to the actual version of core-js.", - "dev": true, - "hasInstallScript": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, "node_modules/cross-env": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.2.tgz", @@ -2615,6 +2597,15 @@ "node": ">=0.10" } }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -3603,31 +3594,19 @@ } }, "node_modules/fetch-mock": { - "version": "9.11.0", - "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-9.11.0.tgz", - "integrity": "sha512-PG1XUv+x7iag5p/iNHD4/jdpxL9FtVSqRMUQhPab4hVDt80T1MH5ehzVrL2IdXO9Q2iBggArFvPqjUbHFuI58Q==", + "version": "11.1.3", + "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-11.1.3.tgz", + "integrity": "sha512-ATh0dWgnVrUHiiXuvQm1Ry+ThWfSv1QQgqJTCtybrNxyUrFiSOaDKsNG29eyysp1SHeNP6Q+dH50+8VifN51Ig==", "dev": true, "dependencies": { - "@babel/core": "^7.0.0", - "@babel/runtime": "^7.0.0", - "core-js": "^3.0.0", - "debug": "^4.1.1", - "glob-to-regexp": "^0.4.0", + "@types/glob-to-regexp": "^0.4.4", + "dequal": "^2.0.3", + "glob-to-regexp": "^0.4.1", "is-subset": "^0.1.1", - "lodash.isequal": "^4.5.0", - "path-to-regexp": "^2.2.1", - "querystring": "^0.2.0", - "whatwg-url": "^6.5.0" + "regexparam": "^3.0.0" }, "engines": { - "node": ">=4.0.0" - }, - "funding": { - "type": "charity", - "url": "https://www.justgiving.com/refugee-support-europe" - }, - "peerDependencies": { - "node-fetch": "*" + "node": ">=8.0.0" }, "peerDependenciesMeta": { "node-fetch": { @@ -3635,32 +3614,6 @@ } } }, - "node_modules/fetch-mock/node_modules/tr46": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", - "integrity": "sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk=", - "dev": true, - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/fetch-mock/node_modules/webidl-conversions": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", - "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==", - "dev": true - }, - "node_modules/fetch-mock/node_modules/whatwg-url": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-6.5.0.tgz", - "integrity": "sha512-rhRZRqx/TLJQWUpQ6bmrt2UV4f0HCQ463yQuONJqC6fO2VoEb1pTYddbe59SkYq87aoM5A3bdhMZiUiVws+fzQ==", - "dev": true, - "dependencies": { - "lodash.sortby": "^4.7.0", - "tr46": "^1.0.1", - "webidl-conversions": "^4.0.2" - } - }, "node_modules/file-entry-cache": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", @@ -6381,12 +6334,6 @@ "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==", "dev": true }, - "node_modules/lodash.isequal": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", - "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=", - "dev": true - }, "node_modules/lodash.memoize": { "version": "4.1.2", "resolved": 
"https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -6399,12 +6346,6 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "node_modules/lodash.sortby": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", - "dev": true - }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -6824,12 +6765,6 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, - "node_modules/path-to-regexp": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.4.0.tgz", - "integrity": "sha512-G6zHoVqC6GGTQkZwF4lkuEyMbVOjoBKAEybQUypI1WTkqinCOrq2x6U2+phkJ1XsEMTy4LjtwPI7HW+NVrRR2w==", - "dev": true - }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -6950,16 +6885,6 @@ "node": ">=6" } }, - "node_modules/querystring": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz", - "integrity": "sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==", - "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", - "dev": true, - "engines": { - "node": ">=0.4.x" - } - }, "node_modules/querystringify": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", @@ -7031,11 +6956,14 @@ "node": ">=4.0.0" } }, - "node_modules/regenerator-runtime": { - "version": "0.13.9", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", - "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", - "dev": true + "node_modules/regexparam": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/regexparam/-/regexparam-3.0.0.tgz", + "integrity": "sha512-RSYAtP31mvYLkAHrOlh25pCNQ5hWnT106VukGaaFfuJrZFkGRX5GhUAdPqpSDXxOhA2c4akmRuplv1mRqnBn6Q==", + "dev": true, + "engines": { + "node": ">=8" + } }, "node_modules/require-directory": { "version": "2.1.1", @@ -8383,15 +8311,6 @@ "@babel/helper-plugin-utils": "^7.22.5" } }, - "@babel/runtime": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.2.tgz", - "integrity": "sha512-hzeyJyMA1YGdJTuWU0e/j4wKXrU4OMFvY2MSlaI9B7VQb0r5cxTE3EAIS2Q7Tn2RIcDkRvTA/v2JsAEhxe99uw==", - "dev": true, - "requires": { - "regenerator-runtime": "^0.13.4" - } - }, "@babel/template": { "version": "7.22.15", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", @@ -9140,6 +9059,12 @@ "@babel/types": "^7.3.0" } }, + "@types/glob-to-regexp": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/@types/glob-to-regexp/-/glob-to-regexp-0.4.4.tgz", + "integrity": "sha512-nDKoaKJYbnn1MZxUY0cA1bPmmgZbg0cTq7Rh13d0KWYNOiKbqoR+2d89SnRPszGh7ROzSwZ/GOjZ4jPbmmZ6Eg==", + "dev": true + }, "@types/google.analytics": { "version": "0.0.40", "resolved": "https://registry.npmjs.org/@types/google.analytics/-/google.analytics-0.0.40.tgz", @@ -9859,12 +9784,6 @@ "safe-buffer": "~5.1.1" } }, - "core-js": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", - "integrity": 
"sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==", - "dev": true - }, "cross-env": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.2.tgz", @@ -9973,6 +9892,12 @@ "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", "dev": true }, + "dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true + }, "detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -10708,49 +10633,16 @@ } }, "fetch-mock": { - "version": "9.11.0", - "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-9.11.0.tgz", - "integrity": "sha512-PG1XUv+x7iag5p/iNHD4/jdpxL9FtVSqRMUQhPab4hVDt80T1MH5ehzVrL2IdXO9Q2iBggArFvPqjUbHFuI58Q==", + "version": "11.1.3", + "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-11.1.3.tgz", + "integrity": "sha512-ATh0dWgnVrUHiiXuvQm1Ry+ThWfSv1QQgqJTCtybrNxyUrFiSOaDKsNG29eyysp1SHeNP6Q+dH50+8VifN51Ig==", "dev": true, "requires": { - "@babel/core": "^7.0.0", - "@babel/runtime": "^7.0.0", - "core-js": "^3.0.0", - "debug": "^4.1.1", - "glob-to-regexp": "^0.4.0", + "@types/glob-to-regexp": "^0.4.4", + "dequal": "^2.0.3", + "glob-to-regexp": "^0.4.1", "is-subset": "^0.1.1", - "lodash.isequal": "^4.5.0", - "path-to-regexp": "^2.2.1", - "querystring": "^0.2.0", - "whatwg-url": "^6.5.0" - }, - "dependencies": { - "tr46": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", - "integrity": "sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk=", - "dev": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "webidl-conversions": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", - "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==", - "dev": true - }, - "whatwg-url": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-6.5.0.tgz", - "integrity": "sha512-rhRZRqx/TLJQWUpQ6bmrt2UV4f0HCQ463yQuONJqC6fO2VoEb1pTYddbe59SkYq87aoM5A3bdhMZiUiVws+fzQ==", - "dev": true, - "requires": { - "lodash.sortby": "^4.7.0", - "tr46": "^1.0.1", - "webidl-conversions": "^4.0.2" - } - } + "regexparam": "^3.0.0" } }, "file-entry-cache": { @@ -12763,12 +12655,6 @@ "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==", "dev": true }, - "lodash.isequal": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", - "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=", - "dev": true - }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -12781,12 +12667,6 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "lodash.sortby": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", - "dev": true - }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -13104,12 +12984,6 @@ "integrity": 
"sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, - "path-to-regexp": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.4.0.tgz", - "integrity": "sha512-G6zHoVqC6GGTQkZwF4lkuEyMbVOjoBKAEybQUypI1WTkqinCOrq2x6U2+phkJ1XsEMTy4LjtwPI7HW+NVrRR2w==", - "dev": true - }, "path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -13196,12 +13070,6 @@ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", "dev": true }, - "querystring": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz", - "integrity": "sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==", - "dev": true - }, "querystringify": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", @@ -13250,10 +13118,10 @@ "promise-queue": "^2.2.5" } }, - "regenerator-runtime": { - "version": "0.13.9", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", - "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "regexparam": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/regexparam/-/regexparam-3.0.0.tgz", + "integrity": "sha512-RSYAtP31mvYLkAHrOlh25pCNQ5hWnT106VukGaaFfuJrZFkGRX5GhUAdPqpSDXxOhA2c4akmRuplv1mRqnBn6Q==", "dev": true }, "require-directory": { diff --git a/package.json b/package.json index a68c87af..bfa30107 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.16.1-rc.12", + "version": "1.17.1-rc.0", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", @@ -66,7 +66,7 @@ "eslint": "^8.48.0", "eslint-plugin-compat": "^4.2.0", "eslint-plugin-import": "^2.25.3", - "fetch-mock": "^9.11.0", + "fetch-mock": "^11.1.3", "ioredis": "^4.28.0", "jest": "^27.2.3", "jest-localstorage-mock": "^2.4.3", diff --git a/src/dtos/types.ts b/src/dtos/types.ts index 85d5251e..f23bd372 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -30,6 +30,10 @@ export interface IInSegmentMatcherData { segmentName: string } +export interface IInLargeSegmentMatcherData { + largeSegmentName: string +} + export interface IDependencyMatcherData { split: string, treatments: string[] @@ -43,6 +47,7 @@ interface ISplitMatcherBase { attribute: string | null } userDefinedSegmentMatcherData?: null | IInSegmentMatcherData + userDefinedLargeSegmentMatcherData?: null | IInLargeSegmentMatcherData whitelistMatcherData?: null | IWhitelistMatcherData unaryNumericMatcherData?: null | IUnaryNumericMatcherData betweenMatcherData?: null | IBetweenMatcherData @@ -63,7 +68,7 @@ interface IInSegmentMatcher extends ISplitMatcherBase { interface IInLargeSegmentMatcher extends ISplitMatcherBase { matcherType: 'IN_LARGE_SEGMENT', - userDefinedSegmentMatcherData: IInSegmentMatcherData + userDefinedLargeSegmentMatcherData: IInLargeSegmentMatcherData } interface IWhitelistMatcher extends ISplitMatcherBase { diff --git a/src/evaluator/matchersTransform/index.ts b/src/evaluator/matchersTransform/index.ts index 877f368d..a5be15e3 100644 --- a/src/evaluator/matchersTransform/index.ts +++ b/src/evaluator/matchersTransform/index.ts @@ -4,7 +4,7 @@ import { segmentTransform } from './segment'; import { 
whitelistTransform } from './whitelist'; import { numericTransform } from './unaryNumeric'; import { zeroSinceHH, zeroSinceSS } from '../convertions'; -import { IBetweenMatcherData, IInSegmentMatcherData, ISplitMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; +import { IBetweenMatcherData, IInLargeSegmentMatcherData, IInSegmentMatcherData, ISplitMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; import { IMatcherDto } from '../types'; /** @@ -18,6 +18,7 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { negate, keySelector, userDefinedSegmentMatcherData, + userDefinedLargeSegmentMatcherData, whitelistMatcherData, /* whitelistObject, provided by 'WHITELIST', 'IN_LIST_SEMVER', set and string matchers */ unaryNumericMatcherData, betweenMatcherData, @@ -33,8 +34,10 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { let dataType = matcherDataTypes.STRING; let value = undefined; - if (type === matcherTypes.IN_SEGMENT || type === matcherTypes.IN_LARGE_SEGMENT) { + if (type === matcherTypes.IN_SEGMENT) { value = segmentTransform(userDefinedSegmentMatcherData as IInSegmentMatcherData); + } else if (type === matcherTypes.IN_LARGE_SEGMENT) { + value = segmentTransform(userDefinedLargeSegmentMatcherData as IInLargeSegmentMatcherData); } else if (type === matcherTypes.EQUAL_TO) { value = numericTransform(unaryNumericMatcherData as IUnaryNumericMatcherData); dataType = matcherDataTypes.NUMBER; diff --git a/src/evaluator/matchersTransform/segment.ts b/src/evaluator/matchersTransform/segment.ts index 00674cf2..c53264dc 100644 --- a/src/evaluator/matchersTransform/segment.ts +++ b/src/evaluator/matchersTransform/segment.ts @@ -1,8 +1,10 @@ -import { IInSegmentMatcherData } from '../../dtos/types'; +import { IInSegmentMatcherData, IInLargeSegmentMatcherData } from '../../dtos/types'; /** * Extract segment name as a plain string. */ -export function segmentTransform(segment?: IInSegmentMatcherData) { - return segment ? segment.segmentName : undefined; +export function segmentTransform(segment?: IInSegmentMatcherData | IInLargeSegmentMatcherData) { + return segment ? 
+ (segment as IInSegmentMatcherData).segmentName || (segment as IInLargeSegmentMatcherData).largeSegmentName : + undefined; } From 2ce5a647b722934273071fe75c0eadf786796eb7 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 16 Sep 2024 18:08:42 -0300 Subject: [PATCH 073/146] Rollback fetch-mock upgrade --- package-lock.json | 241 +++++++++++++++++++++++++++++++++++----------- package.json | 2 +- 2 files changed, 187 insertions(+), 56 deletions(-) diff --git a/package-lock.json b/package-lock.json index 1e665ab3..0e7168d3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -22,7 +22,7 @@ "eslint": "^8.48.0", "eslint-plugin-compat": "^4.2.0", "eslint-plugin-import": "^2.25.3", - "fetch-mock": "^11.1.3", + "fetch-mock": "^9.11.0", "ioredis": "^4.28.0", "jest": "^27.2.3", "jest-localstorage-mock": "^2.4.3", @@ -492,6 +492,18 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/runtime": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", + "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "dev": true, + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/template": { "version": "7.22.15", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", @@ -1459,12 +1471,6 @@ "@babel/types": "^7.3.0" } }, - "node_modules/@types/glob-to-regexp": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/@types/glob-to-regexp/-/glob-to-regexp-0.4.4.tgz", - "integrity": "sha512-nDKoaKJYbnn1MZxUY0cA1bPmmgZbg0cTq7Rh13d0KWYNOiKbqoR+2d89SnRPszGh7ROzSwZ/GOjZ4jPbmmZ6Eg==", - "dev": true - }, "node_modules/@types/google.analytics": { "version": "0.0.40", "resolved": "https://registry.npmjs.org/@types/google.analytics/-/google.analytics-0.0.40.tgz", @@ -2453,6 +2459,17 @@ "safe-buffer": "~5.1.1" } }, + "node_modules/core-js": { + "version": "3.38.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.38.1.tgz", + "integrity": "sha512-OP35aUorbU3Zvlx7pjsFdu1rGNnD4pgw/CWoYzRY3t2EzoVT7shKHY1dlAy3f41cGIO7ZDPQimhGFTlEYkG/Hw==", + "dev": true, + "hasInstallScript": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, "node_modules/cross-env": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.2.tgz", @@ -2597,15 +2614,6 @@ "node": ">=0.10" } }, - "node_modules/dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, "node_modules/detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -3594,19 +3602,31 @@ } }, "node_modules/fetch-mock": { - "version": "11.1.3", - "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-11.1.3.tgz", - "integrity": "sha512-ATh0dWgnVrUHiiXuvQm1Ry+ThWfSv1QQgqJTCtybrNxyUrFiSOaDKsNG29eyysp1SHeNP6Q+dH50+8VifN51Ig==", + "version": "9.11.0", + "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-9.11.0.tgz", + "integrity": "sha512-PG1XUv+x7iag5p/iNHD4/jdpxL9FtVSqRMUQhPab4hVDt80T1MH5ehzVrL2IdXO9Q2iBggArFvPqjUbHFuI58Q==", "dev": true, "dependencies": { - "@types/glob-to-regexp": "^0.4.4", - "dequal": "^2.0.3", - "glob-to-regexp": "^0.4.1", + "@babel/core": "^7.0.0", + 
"@babel/runtime": "^7.0.0", + "core-js": "^3.0.0", + "debug": "^4.1.1", + "glob-to-regexp": "^0.4.0", "is-subset": "^0.1.1", - "regexparam": "^3.0.0" + "lodash.isequal": "^4.5.0", + "path-to-regexp": "^2.2.1", + "querystring": "^0.2.0", + "whatwg-url": "^6.5.0" }, "engines": { - "node": ">=8.0.0" + "node": ">=4.0.0" + }, + "funding": { + "type": "charity", + "url": "https://www.justgiving.com/refugee-support-europe" + }, + "peerDependencies": { + "node-fetch": "*" }, "peerDependenciesMeta": { "node-fetch": { @@ -3614,6 +3634,32 @@ } } }, + "node_modules/fetch-mock/node_modules/tr46": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", + "integrity": "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/fetch-mock/node_modules/webidl-conversions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", + "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==", + "dev": true + }, + "node_modules/fetch-mock/node_modules/whatwg-url": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-6.5.0.tgz", + "integrity": "sha512-rhRZRqx/TLJQWUpQ6bmrt2UV4f0HCQ463yQuONJqC6fO2VoEb1pTYddbe59SkYq87aoM5A3bdhMZiUiVws+fzQ==", + "dev": true, + "dependencies": { + "lodash.sortby": "^4.7.0", + "tr46": "^1.0.1", + "webidl-conversions": "^4.0.2" + } + }, "node_modules/file-entry-cache": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", @@ -6334,6 +6380,12 @@ "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==", "dev": true }, + "node_modules/lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", + "dev": true + }, "node_modules/lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -6346,6 +6398,12 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, + "node_modules/lodash.sortby": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", + "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", + "dev": true + }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -6765,6 +6823,12 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, + "node_modules/path-to-regexp": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.4.0.tgz", + "integrity": "sha512-G6zHoVqC6GGTQkZwF4lkuEyMbVOjoBKAEybQUypI1WTkqinCOrq2x6U2+phkJ1XsEMTy4LjtwPI7HW+NVrRR2w==", + "dev": true + }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -6885,6 +6949,16 @@ "node": ">=6" } }, + "node_modules/querystring": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz", + 
"integrity": "sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==", + "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", + "dev": true, + "engines": { + "node": ">=0.4.x" + } + }, "node_modules/querystringify": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", @@ -6956,14 +7030,11 @@ "node": ">=4.0.0" } }, - "node_modules/regexparam": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/regexparam/-/regexparam-3.0.0.tgz", - "integrity": "sha512-RSYAtP31mvYLkAHrOlh25pCNQ5hWnT106VukGaaFfuJrZFkGRX5GhUAdPqpSDXxOhA2c4akmRuplv1mRqnBn6Q==", - "dev": true, - "engines": { - "node": ">=8" - } + "node_modules/regenerator-runtime": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", + "dev": true }, "node_modules/require-directory": { "version": "2.1.1", @@ -8311,6 +8382,15 @@ "@babel/helper-plugin-utils": "^7.22.5" } }, + "@babel/runtime": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", + "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.14.0" + } + }, "@babel/template": { "version": "7.22.15", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", @@ -9059,12 +9139,6 @@ "@babel/types": "^7.3.0" } }, - "@types/glob-to-regexp": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/@types/glob-to-regexp/-/glob-to-regexp-0.4.4.tgz", - "integrity": "sha512-nDKoaKJYbnn1MZxUY0cA1bPmmgZbg0cTq7Rh13d0KWYNOiKbqoR+2d89SnRPszGh7ROzSwZ/GOjZ4jPbmmZ6Eg==", - "dev": true - }, "@types/google.analytics": { "version": "0.0.40", "resolved": "https://registry.npmjs.org/@types/google.analytics/-/google.analytics-0.0.40.tgz", @@ -9784,6 +9858,12 @@ "safe-buffer": "~5.1.1" } }, + "core-js": { + "version": "3.38.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.38.1.tgz", + "integrity": "sha512-OP35aUorbU3Zvlx7pjsFdu1rGNnD4pgw/CWoYzRY3t2EzoVT7shKHY1dlAy3f41cGIO7ZDPQimhGFTlEYkG/Hw==", + "dev": true + }, "cross-env": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.2.tgz", @@ -9892,12 +9972,6 @@ "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", "dev": true }, - "dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "dev": true - }, "detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -10633,16 +10707,49 @@ } }, "fetch-mock": { - "version": "11.1.3", - "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-11.1.3.tgz", - "integrity": "sha512-ATh0dWgnVrUHiiXuvQm1Ry+ThWfSv1QQgqJTCtybrNxyUrFiSOaDKsNG29eyysp1SHeNP6Q+dH50+8VifN51Ig==", + "version": "9.11.0", + "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-9.11.0.tgz", + "integrity": "sha512-PG1XUv+x7iag5p/iNHD4/jdpxL9FtVSqRMUQhPab4hVDt80T1MH5ehzVrL2IdXO9Q2iBggArFvPqjUbHFuI58Q==", "dev": true, "requires": { - "@types/glob-to-regexp": "^0.4.4", 
- "dequal": "^2.0.3", - "glob-to-regexp": "^0.4.1", + "@babel/core": "^7.0.0", + "@babel/runtime": "^7.0.0", + "core-js": "^3.0.0", + "debug": "^4.1.1", + "glob-to-regexp": "^0.4.0", "is-subset": "^0.1.1", - "regexparam": "^3.0.0" + "lodash.isequal": "^4.5.0", + "path-to-regexp": "^2.2.1", + "querystring": "^0.2.0", + "whatwg-url": "^6.5.0" + }, + "dependencies": { + "tr46": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", + "integrity": "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "webidl-conversions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", + "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==", + "dev": true + }, + "whatwg-url": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-6.5.0.tgz", + "integrity": "sha512-rhRZRqx/TLJQWUpQ6bmrt2UV4f0HCQ463yQuONJqC6fO2VoEb1pTYddbe59SkYq87aoM5A3bdhMZiUiVws+fzQ==", + "dev": true, + "requires": { + "lodash.sortby": "^4.7.0", + "tr46": "^1.0.1", + "webidl-conversions": "^4.0.2" + } + } } }, "file-entry-cache": { @@ -12655,6 +12762,12 @@ "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==", "dev": true }, + "lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", + "dev": true + }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -12667,6 +12780,12 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, + "lodash.sortby": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", + "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", + "dev": true + }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -12984,6 +13103,12 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, + "path-to-regexp": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.4.0.tgz", + "integrity": "sha512-G6zHoVqC6GGTQkZwF4lkuEyMbVOjoBKAEybQUypI1WTkqinCOrq2x6U2+phkJ1XsEMTy4LjtwPI7HW+NVrRR2w==", + "dev": true + }, "path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -13070,6 +13195,12 @@ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", "dev": true }, + "querystring": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz", + "integrity": "sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==", + "dev": true + }, "querystringify": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", @@ -13118,10 +13249,10 @@ "promise-queue": "^2.2.5" } }, - "regexparam": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/regexparam/-/regexparam-3.0.0.tgz", - "integrity": "sha512-RSYAtP31mvYLkAHrOlh25pCNQ5hWnT106VukGaaFfuJrZFkGRX5GhUAdPqpSDXxOhA2c4akmRuplv1mRqnBn6Q==", + "regenerator-runtime": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", "dev": true }, "require-directory": { diff --git a/package.json b/package.json index bfa30107..ec638f10 100644 --- a/package.json +++ b/package.json @@ -66,7 +66,7 @@ "eslint": "^8.48.0", "eslint-plugin-compat": "^4.2.0", "eslint-plugin-import": "^2.25.3", - "fetch-mock": "^11.1.3", + "fetch-mock": "^9.11.0", "ioredis": "^4.28.0", "jest": "^27.2.3", "jest-localstorage-mock": "^2.4.3", From 76f785d0e792f2d199e90eec3527f91274c02769 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 17 Sep 2024 12:04:55 -0300 Subject: [PATCH 074/146] Add clients map to factory context --- .../__tests__/sdkClientMethodCS.spec.ts | 4 +++- src/sdkClient/identity.ts | 5 +++++ src/sdkClient/sdkClientMethod.ts | 2 ++ src/sdkClient/sdkClientMethodCS.ts | 17 ++++++----------- src/sdkClient/sdkClientMethodCSWithTT.ts | 17 ++++++----------- src/sdkFactory/index.ts | 6 +++--- src/sdkFactory/types.ts | 3 ++- src/types.ts | 2 +- 8 files changed, 28 insertions(+), 28 deletions(-) create mode 100644 src/sdkClient/identity.ts diff --git a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts index dbe57ec9..1abb2a93 100644 --- a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts @@ -45,7 +45,8 @@ const params = { syncManager: syncManagerMock, signalListener: { stop: jest.fn() }, settings: settingsWithKey, - telemetryTracker: telemetryTrackerFactory() + telemetryTracker: telemetryTrackerFactory(), + clients: {} }; const invalidAttributes = [ @@ -71,6 +72,7 @@ describe('sdkClientMethodCSFactory', () => { partialStorages.length = 0; partialSdkReadinessManagers.length = 0; partialSyncManagers.length = 0; + params.clients = {}; }); // list of factory functions and their types (whether it ignores TT or not) diff --git a/src/sdkClient/identity.ts b/src/sdkClient/identity.ts new file mode 100644 index 00000000..08822263 --- /dev/null +++ b/src/sdkClient/identity.ts @@ -0,0 +1,5 @@ +import { SplitIO } from '../types'; + +export function buildInstanceId(key: SplitIO.SplitKey, trafficType?: string) { // @ts-ignore + return `${key.matchingKey ? key.matchingKey : key}-${key.bucketingKey ? key.bucketingKey : key}-${trafficType ? trafficType : ''}`; +} diff --git a/src/sdkClient/sdkClientMethod.ts b/src/sdkClient/sdkClientMethod.ts index 9cd117ea..3fe6f4fa 100644 --- a/src/sdkClient/sdkClientMethod.ts +++ b/src/sdkClient/sdkClientMethod.ts @@ -10,6 +10,8 @@ export function sdkClientMethodFactory(params: ISdkFactoryContext): () => SplitI const log = params.settings.log; const clientInstance = sdkClientFactory(params); + params.clients[''] = clientInstance; + return function client() { if (arguments.length > 0) { throw new Error('Shared Client not supported by the storage mechanism. 
Create isolated instances instead.'); diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 4fd0d626..01b54e05 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -8,18 +8,14 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, LOG_PREFIX_CLIENT_INSTANTIATION } from '../logger/constants'; import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; - -function buildInstanceId(key: SplitIO.SplitKey) { - // @ts-ignore - return `${key.matchingKey ? key.matchingKey : key}-${key.bucketingKey ? key.bucketingKey : key}-`; -} +import { buildInstanceId } from './identity'; /** * Factory of client method for the client-side API variant where TT is ignored. * Therefore, clients don't have a bound TT for the track method. */ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.ICsClient { - const { storage, syncManager, sdkReadinessManager, settings: { core: { key }, startup: { readyTimeout }, log } } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, startup: { readyTimeout }, log } } = params; const mainClientInstance = clientCSDecorator( log, @@ -31,8 +27,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl const defaultInstanceId = buildInstanceId(parsedDefaultKey); // Cache instances created per factory. - const clientInstances: Record = {}; - clientInstances[defaultInstanceId] = mainClientInstance; + clients[defaultInstanceId] = mainClientInstance; return function client(key?: SplitIO.SplitKey) { if (key === undefined) { @@ -48,7 +43,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl const instanceId = buildInstanceId(validKey); - if (!clientInstances[instanceId]) { + if (!clients[instanceId]) { const matchingKey = getMatching(validKey); const sharedSdkReadiness = sdkReadinessManager.shared(readyTimeout); @@ -70,7 +65,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl // As shared clients reuse all the storage information, we don't need to check here if we // will use offline or online mode. We should stick with the original decision. - clientInstances[instanceId] = clientCSDecorator( + clients[instanceId] = clientCSDecorator( log, sdkClientFactory(objectAssign({}, params, { sdkReadinessManager: sharedSdkReadiness, @@ -88,6 +83,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl log.debug(RETRIEVE_CLIENT_EXISTING); } - return clientInstances[instanceId]; + return clients[instanceId]; }; } diff --git a/src/sdkClient/sdkClientMethodCSWithTT.ts b/src/sdkClient/sdkClientMethodCSWithTT.ts index fb4aa77e..06ced5f0 100644 --- a/src/sdkClient/sdkClientMethodCSWithTT.ts +++ b/src/sdkClient/sdkClientMethodCSWithTT.ts @@ -9,11 +9,7 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, LOG_PREFIX_CLIENT_INSTANTIATION } from '../logger/constants'; import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; - -function buildInstanceId(key: SplitIO.SplitKey, trafficType?: string) { - // @ts-ignore - return `${key.matchingKey ? key.matchingKey : key}-${key.bucketingKey ? 
key.bucketingKey : key}-${trafficType !== undefined ? trafficType : ''}`; -} +import { buildInstanceId } from './identity'; /** * Factory of client method for the client-side (browser) variant of the Isomorphic JS SDK, @@ -21,7 +17,7 @@ function buildInstanceId(key: SplitIO.SplitKey, trafficType?: string) { * (default client) or the client method (shared clients). */ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey, trafficType?: string) => SplitIO.ICsClient { - const { storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, startup: { readyTimeout }, log } } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, startup: { readyTimeout }, log } } = params; const mainClientInstance = clientCSDecorator( log, @@ -34,8 +30,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl const defaultInstanceId = buildInstanceId(parsedDefaultKey, trafficType); // Cache instances created per factory. - const clientInstances: Record = {}; - clientInstances[defaultInstanceId] = mainClientInstance; + clients[defaultInstanceId] = mainClientInstance; return function client(key?: SplitIO.SplitKey, trafficType?: string) { if (key === undefined) { @@ -58,7 +53,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl } const instanceId = buildInstanceId(validKey, validTrafficType); - if (!clientInstances[instanceId]) { + if (!clients[instanceId]) { const matchingKey = getMatching(validKey); const sharedSdkReadiness = sdkReadinessManager.shared(readyTimeout); @@ -80,7 +75,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl // As shared clients reuse all the storage information, we don't need to check here if we // will use offline or online mode. We should stick with the original decision. - clientInstances[instanceId] = clientCSDecorator( + clients[instanceId] = clientCSDecorator( log, sdkClientFactory(objectAssign({}, params, { sdkReadinessManager: sharedSdkReadiness, @@ -99,6 +94,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl log.debug(RETRIEVE_CLIENT_EXISTING); } - return clientInstances[instanceId]; + return clients[instanceId]; }; } diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index cd15e9ef..038001fe 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -3,7 +3,7 @@ import { sdkReadinessManagerFactory } from '../readiness/sdkReadinessManager'; import { impressionsTrackerFactory } from '../trackers/impressionsTracker'; import { eventTrackerFactory } from '../trackers/eventTracker'; import { telemetryTrackerFactory } from '../trackers/telemetryTracker'; -import { SplitIO } from '../types'; +import { IBasicClient, SplitIO } from '../types'; import { validateAndTrackApiKey } from '../utils/inputValidation/apiKey'; import { createLoggerAPI } from '../logger/sdkLogger'; import { NEW_FACTORY, RETRIEVE_MANAGER } from '../logger/constants'; @@ -48,7 +48,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
}, }); // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` - + const clients: Record = {}; const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now); const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker }); @@ -73,7 +73,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. // splitApi is used by SyncManager and Browser signal listener const splitApi = splitApiFactory && splitApiFactory(settings, platform, telemetryTracker); - const ctx: ISdkFactoryContext = { splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform }; + const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform }; const syncManager = syncManagerFactory && syncManagerFactory(ctx as ISdkFactoryContextSync); ctx.syncManager = syncManager; diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts index c785db33..b0a3b3f2 100644 --- a/src/sdkFactory/types.ts +++ b/src/sdkFactory/types.ts @@ -8,7 +8,7 @@ import { IStorageAsync, IStorageSync, IStorageFactoryParams } from '../storages/ import { ISyncManager } from '../sync/types'; import { IImpressionObserver } from '../trackers/impressionObserver/types'; import { IImpressionsTracker, IEventTracker, ITelemetryTracker, IFilterAdapter, IUniqueKeysTracker } from '../trackers/types'; -import { SplitIO, ISettings, IEventEmitter } from '../types'; +import { SplitIO, ISettings, IEventEmitter, IBasicClient } from '../types'; /** * Environment related dependencies. @@ -49,6 +49,7 @@ export interface ISdkFactoryContext { signalListener?: ISignalListener splitApi?: ISplitApi syncManager?: ISyncManager, + clients: Record, } export interface ISdkFactoryContextSync extends ISdkFactoryContext { diff --git a/src/types.ts b/src/types.ts index ac061f28..68218b27 100644 --- a/src/types.ts +++ b/src/types.ts @@ -426,7 +426,7 @@ export interface IStatusInterface extends IEventEmitter { * @interface IBasicClient * @extends IStatusInterface */ -interface IBasicClient extends IStatusInterface { +export interface IBasicClient extends IStatusInterface { /** * Flush data * @function flush From 2c8335ed9682bc863a1b0d2d6b308ea57a271327 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 17 Sep 2024 14:32:18 -0300 Subject: [PATCH 075/146] Add factory.destroy method --- src/sdkClient/__tests__/sdkClientMethod.spec.ts | 6 ++++-- src/sdkClient/sdkClientMethodCS.ts | 2 +- src/sdkClient/sdkClientMethodCSWithTT.ts | 2 +- src/sdkFactory/index.ts | 4 ++++ 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/sdkClient/__tests__/sdkClientMethod.spec.ts b/src/sdkClient/__tests__/sdkClientMethod.spec.ts index 27be5258..2ae7dff3 100644 --- a/src/sdkClient/__tests__/sdkClientMethod.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethod.spec.ts @@ -14,7 +14,8 @@ const paramMocks = [ sdkReadinessManager: { sdkStatus: jest.fn(), readinessManager: { destroy: jest.fn() } }, signalListener: undefined, settings: { mode: CONSUMER_MODE, log: loggerMock, core: { authorizationKey: 'sdk key '} }, - telemetryTracker: telemetryTrackerFactory() + telemetryTracker: telemetryTrackerFactory(), + clients: {} }, // SyncManager (i.e., Sync SDK) and Signal listener { @@ -23,7 +24,8 @@ const paramMocks = [ sdkReadinessManager: { sdkStatus: jest.fn(), 
readinessManager: { destroy: jest.fn() } }, signalListener: { stop: jest.fn() }, settings: { mode: STANDALONE_MODE, log: loggerMock, core: { authorizationKey: 'sdk key '} }, - telemetryTracker: telemetryTrackerFactory() + telemetryTracker: telemetryTrackerFactory(), + clients: {} } ]; diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 01b54e05..a69b3b27 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -83,6 +83,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl log.debug(RETRIEVE_CLIENT_EXISTING); } - return clients[instanceId]; + return clients[instanceId] as SplitIO.ICsClient; }; } diff --git a/src/sdkClient/sdkClientMethodCSWithTT.ts b/src/sdkClient/sdkClientMethodCSWithTT.ts index 06ced5f0..f564b258 100644 --- a/src/sdkClient/sdkClientMethodCSWithTT.ts +++ b/src/sdkClient/sdkClientMethodCSWithTT.ts @@ -94,6 +94,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl log.debug(RETRIEVE_CLIENT_EXISTING); } - return clients[instanceId]; + return clients[instanceId] as SplitIO.ICsClient; }; } diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 038001fe..cf9518a7 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -105,5 +105,9 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. Logger: createLoggerAPI(log), settings, + + destroy() { + return Promise.all(Object.keys(clients).map(key => clients[key].destroy())).then(() => {}); + } }, extraProps && extraProps(ctx)); } From bcdff097b2cf506c879f01e4b04b5c7ceee8e849 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 17 Sep 2024 14:42:39 -0300 Subject: [PATCH 076/146] Add types and changelog entry --- CHANGES.txt | 3 +++ src/types.ts | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/CHANGES.txt b/CHANGES.txt index 5fff047b..61a0bfd9 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,6 @@ +1.18.0 (September XX, 2024) + - Added `factory.destroy()` method, as a shortcut to destroy all SDK clients created by the factory. + 1.17.0 (September 6, 2024) - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. - Added `isTimedout` and `lastUpdate` properties to IStatusInterface to keep track of the timestamp of the last SDK event, used on React and Redux SDKs. diff --git a/src/types.ts b/src/types.ts index 68218b27..2a65b297 100644 --- a/src/types.ts +++ b/src/types.ts @@ -459,6 +459,12 @@ interface IBasicSDK { * @property Logger */ Logger: ILoggerAPI + /** + * Destroy all the clients created by this factory. 
+ * @function destroy + * @returns {Promise} + */ + destroy(): Promise } /****** Exposed namespace ******/ /** From 670d213dfcdeae202927694f7a039f585be08d52 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 18 Sep 2024 13:50:08 -0300 Subject: [PATCH 077/146] Update detroy method to stop uniqueKeysTracker jobs in the main client only --- src/sdkClient/sdkClient.ts | 10 +++++----- src/sdkClient/sdkClientMethodCS.ts | 1 - src/sdkClient/sdkClientMethodCSWithTT.ts | 1 - 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/sdkClient/sdkClient.ts b/src/sdkClient/sdkClient.ts index b5360a28..fdfa135b 100644 --- a/src/sdkClient/sdkClient.ts +++ b/src/sdkClient/sdkClient.ts @@ -66,11 +66,11 @@ export function sdkClientFactory(params: ISdkFactoryContext, isSharedClient?: bo syncManager && syncManager.stop(); return __flush().then(() => { - // Cleanup event listeners - signalListener && signalListener.stop(); - - // @TODO stop only if last client is destroyed - if (uniqueKeysTracker) uniqueKeysTracker.stop(); + // For main client, cleanup event listeners and scheduled jobs + if (!isSharedClient) { + signalListener && signalListener.stop(); + uniqueKeysTracker && uniqueKeysTracker.stop(); + } // Cleanup storage return storage.destroy(); diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index a69b3b27..35e93c85 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -71,7 +71,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sdkReadinessManager: sharedSdkReadiness, storage: sharedStorage || storage, syncManager: sharedSyncManager, - signalListener: undefined, // only the main client "destroy" method stops the signal listener }), true) as SplitIO.IClient, validKey ); diff --git a/src/sdkClient/sdkClientMethodCSWithTT.ts b/src/sdkClient/sdkClientMethodCSWithTT.ts index f564b258..01ccf07b 100644 --- a/src/sdkClient/sdkClientMethodCSWithTT.ts +++ b/src/sdkClient/sdkClientMethodCSWithTT.ts @@ -81,7 +81,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sdkReadinessManager: sharedSdkReadiness, storage: sharedStorage || storage, syncManager: sharedSyncManager, - signalListener: undefined, // only the main client "destroy" method stops the signal listener }), true) as SplitIO.IClient, validKey, validTrafficType From f17519b886a49d1f6d1e398670ed76f2cecb0ab1 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 18 Sep 2024 14:09:17 -0300 Subject: [PATCH 078/146] Update detroy method to stop uniqueKeysTracker jobs in the main client only --- src/sdkClient/sdkClient.ts | 10 +++++----- src/sdkClient/sdkClientMethodCS.ts | 1 - src/sdkClient/sdkClientMethodCSWithTT.ts | 1 - 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/sdkClient/sdkClient.ts b/src/sdkClient/sdkClient.ts index b5360a28..fdfa135b 100644 --- a/src/sdkClient/sdkClient.ts +++ b/src/sdkClient/sdkClient.ts @@ -66,11 +66,11 @@ export function sdkClientFactory(params: ISdkFactoryContext, isSharedClient?: bo syncManager && syncManager.stop(); return __flush().then(() => { - // Cleanup event listeners - signalListener && signalListener.stop(); - - // @TODO stop only if last client is destroyed - if (uniqueKeysTracker) uniqueKeysTracker.stop(); + // For main client, cleanup event listeners and scheduled jobs + if (!isSharedClient) { + signalListener && signalListener.stop(); + uniqueKeysTracker && uniqueKeysTracker.stop(); + } // Cleanup storage return storage.destroy(); diff 
--git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 4fd0d626..8ad43a58 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -76,7 +76,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sdkReadinessManager: sharedSdkReadiness, storage: sharedStorage || storage, syncManager: sharedSyncManager, - signalListener: undefined, // only the main client "destroy" method stops the signal listener }), true) as SplitIO.IClient, validKey ); diff --git a/src/sdkClient/sdkClientMethodCSWithTT.ts b/src/sdkClient/sdkClientMethodCSWithTT.ts index fb4aa77e..18cca418 100644 --- a/src/sdkClient/sdkClientMethodCSWithTT.ts +++ b/src/sdkClient/sdkClientMethodCSWithTT.ts @@ -86,7 +86,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sdkReadinessManager: sharedSdkReadiness, storage: sharedStorage || storage, syncManager: sharedSyncManager, - signalListener: undefined, // only the main client "destroy" method stops the signal listener }), true) as SplitIO.IClient, validKey, validTrafficType From 6797cfb26f677ec9c669f5ed5449fbe0dbb2ad4f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 18 Sep 2024 15:17:23 -0300 Subject: [PATCH 079/146] Update changelog entry --- CHANGES.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES.txt b/CHANGES.txt index ba4fc466..6396126b 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,6 +1,7 @@ 2.0.0 (September XX, 2024) - Added support for targeting rules based on large segments. - BREAKING CHANGES: + - Updated default flag spec version to 1.2. - Removed `/mySegments` endpoint from SplitAPI module, as it is replaced by `/memberships` endpoint. - Removed support for MY_SEGMENTS_UPDATE and MY_SEGMENTS_UPDATE_V2 notification types, as they are replaced by MEMBERSHIPS_MS_UPDATE and MEMBERSHIPS_LS_UPDATE notification types. From 4de7204bcfe6d6f3e05f5b9ceb73d10fc2acc25a Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 19 Sep 2024 15:38:17 -0300 Subject: [PATCH 080/146] Segments cache: replace addToSegment, removeFromSegment and setChangeNumber with update method for simplicity --- src/storages/AbstractSegmentsCacheSync.ts | 1 + .../inMemory/SegmentsCacheInMemory.ts | 12 ++++++ src/storages/inRedis/SegmentsCacheInRedis.ts | 17 ++++++++ .../pluggable/SegmentsCachePluggable.ts | 17 ++++++++ src/storages/types.ts | 3 ++ .../polling/updaters/segmentChangesUpdater.ts | 42 ++++++------------- 6 files changed, 63 insertions(+), 29 deletions(-) diff --git a/src/storages/AbstractSegmentsCacheSync.ts b/src/storages/AbstractSegmentsCacheSync.ts index 7e398203..ce609fa1 100644 --- a/src/storages/AbstractSegmentsCacheSync.ts +++ b/src/storages/AbstractSegmentsCacheSync.ts @@ -39,6 +39,7 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { * For client-side synchronizer: the method is not used. */ registerSegments(names: string[]): boolean { return false; } + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number) { return false; } /** * For server-side synchronizer: get the list of segments to fetch changes. 
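The patch above consolidates addToSegment, removeFromSegment and setChangeNumber into a single update operation on the segments caches; the hunks that follow show its concrete implementations. As a quick illustration of what the refactor means for a caller, here is a minimal sketch against the in-memory server-side cache, a sketch only: the import path is relative to src/, and the segment name, keys and change number are made-up values mirroring the updated unit tests further below.

import { SegmentsCacheInMemory } from './storages/inMemory/SegmentsCacheInMemory';

const cache = new SegmentsCacheInMemory();

// Before this change, applying a segmentChanges fetch took three separate calls:
//   cache.addToSegment('mocked-segment', ['a', 'b', 'c']);
//   cache.removeFromSegment('mocked-segment', ['d']);
//   cache.setChangeNumber('mocked-segment', 1);

// After it, a single call applies the added keys, the removed keys and the new change number,
// and returns whether the segment actually changed (i.e., some key was added or removed).
// The in-memory variant is synchronous; the Redis and pluggable variants return a Promise.
const updated = cache.update('mocked-segment', ['a', 'b', 'c'], ['d'], 1);

console.log(updated); // true
console.log(cache.isInSegment('mocked-segment', 'a')); // true
console.log(cache.getChangeNumber('mocked-segment')); // 1
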
diff --git a/src/storages/inMemory/SegmentsCacheInMemory.ts b/src/storages/inMemory/SegmentsCacheInMemory.ts index a7d52b7c..91635354 100644 --- a/src/storages/inMemory/SegmentsCacheInMemory.ts +++ b/src/storages/inMemory/SegmentsCacheInMemory.ts @@ -11,6 +11,18 @@ export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { private segmentCache: Record> = {}; private segmentChangeNumber: Record = {}; + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number) { + const keySet = this.segmentCache[name] || new _Set(); + + addedKeys.forEach(k => keySet.add(k)); + removedKeys.forEach(k => keySet.delete(k)); + + this.segmentCache[name] = keySet; + this.segmentChangeNumber[name] = changeNumber; + + return addedKeys.length > 0 || removedKeys.length > 0; + } + addToSegment(name: string, segmentKeys: string[]): boolean { const values = this.segmentCache[name]; const keySet = values ? values : new _Set(); diff --git a/src/storages/inRedis/SegmentsCacheInRedis.ts b/src/storages/inRedis/SegmentsCacheInRedis.ts index 7ec2f20f..5537c823 100644 --- a/src/storages/inRedis/SegmentsCacheInRedis.ts +++ b/src/storages/inRedis/SegmentsCacheInRedis.ts @@ -17,6 +17,23 @@ export class SegmentsCacheInRedis implements ISegmentsCacheAsync { this.keys = keys; } + /** + * Update the given segment `name` with the lists of `addedKeys`, `removedKeys` and `changeNumber`. + * The returned promise is resolved if the operation success, with `true` if the segment was updated (i.e., some key was added or removed), + * or rejected if it fails (e.g., Redis operation fails). + */ + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number) { + const segmentKey = this.keys.buildSegmentNameKey(name); + + return Promise.all([ + addedKeys.length && this.redis.sadd(segmentKey, addedKeys), + removedKeys.length && this.redis.srem(segmentKey, removedKeys), + this.redis.set(this.keys.buildSegmentTillKey(name), changeNumber + '') + ]).then(() => { + return addedKeys.length > 0 || removedKeys.length > 0; + }); + } + addToSegment(name: string, segmentKeys: string[]) { const segmentKey = this.keys.buildSegmentNameKey(name); diff --git a/src/storages/pluggable/SegmentsCachePluggable.ts b/src/storages/pluggable/SegmentsCachePluggable.ts index 995c66df..704b634c 100644 --- a/src/storages/pluggable/SegmentsCachePluggable.ts +++ b/src/storages/pluggable/SegmentsCachePluggable.ts @@ -22,6 +22,23 @@ export class SegmentsCachePluggable implements ISegmentsCacheAsync { this.wrapper = wrapper; } + /** + * Update the given segment `name` with the lists of `addedKeys`, `removedKeys` and `changeNumber`. + * The returned promise is resolved if the operation success, with `true` if the segment was updated (i.e., some key was added or removed), + * or rejected if it fails (e.g., wrapper operation fails). + */ + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number) { + const segmentKey = this.keys.buildSegmentNameKey(name); + + return Promise.all([ + addedKeys.length && this.wrapper.addItems(segmentKey, addedKeys), + removedKeys.length && this.wrapper.removeItems(segmentKey, removedKeys), + this.wrapper.set(this.keys.buildSegmentTillKey(name), changeNumber + '') + ]).then(() => { + return addedKeys.length > 0 || removedKeys.length > 0; + }); + } + /** * Add a list of `segmentKeys` to the given segment `name`. 
* The returned promise is resolved when the operation success diff --git a/src/storages/types.ts b/src/storages/types.ts index b3b1076c..41f569b4 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -258,6 +258,7 @@ export interface ISegmentsCacheBase { getRegisteredSegments(): MaybeThenable // only for Server-Side setChangeNumber(name: string, changeNumber: number): MaybeThenable // only for Server-Side getChangeNumber(name: string): MaybeThenable // only for Server-Side + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number): MaybeThenable // only for Server-Side clear(): MaybeThenable } @@ -271,6 +272,7 @@ export interface ISegmentsCacheSync extends ISegmentsCacheBase { getKeysCount(): number // only used for telemetry setChangeNumber(name: string, changeNumber: number): boolean | void getChangeNumber(name?: string): number + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number): boolean // only for Server-Side resetSegments(segmentsData: MySegmentsData | IMySegmentsResponse): boolean // only for Sync Client-Side clear(): void } @@ -283,6 +285,7 @@ export interface ISegmentsCacheAsync extends ISegmentsCacheBase { getRegisteredSegments(): Promise setChangeNumber(name: string, changeNumber: number): Promise getChangeNumber(name: string): Promise + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number): Promise clear(): Promise } diff --git a/src/sync/polling/updaters/segmentChangesUpdater.ts b/src/sync/polling/updaters/segmentChangesUpdater.ts index 39d147ff..a643826b 100644 --- a/src/sync/polling/updaters/segmentChangesUpdater.ts +++ b/src/sync/polling/updaters/segmentChangesUpdater.ts @@ -1,12 +1,9 @@ import { ISegmentChangesFetcher } from '../fetchers/types'; import { ISegmentsCacheBase } from '../../../storages/types'; import { IReadinessManager } from '../../../readiness/types'; -import { MaybeThenable } from '../../../dtos/types'; -import { findIndex } from '../../../utils/lang'; import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { LOG_PREFIX_INSTANTIATION, LOG_PREFIX_SYNC_SEGMENTS } from '../../../logger/constants'; -import { thenable } from '../../../utils/promise/thenable'; type ISegmentChangesUpdater = (fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number) => Promise @@ -30,31 +27,22 @@ export function segmentChangesUpdaterFactory( let readyOnAlreadyExistentState = true; - function updateSegment(segmentName: string, noCache?: boolean, till?: number, fetchOnlyNew?: boolean) { + function updateSegment(segmentName: string, noCache?: boolean, till?: number, fetchOnlyNew?: boolean): Promise { log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processing segment ${segmentName}`); let sincePromise = Promise.resolve(segments.getChangeNumber(segmentName)); return sincePromise.then(since => { // if fetchOnlyNew flag, avoid processing already fetched segments - if (fetchOnlyNew && since !== -1) return -1; - - return segmentChangesFetcher(since, segmentName, noCache, till).then(function (changes) { - let changeNumber = -1; - const results: MaybeThenable[] = []; - changes.forEach(x => { - if (x.added.length > 0) results.push(segments.addToSegment(segmentName, x.added)); - if (x.removed.length > 0) results.push(segments.removeFromSegment(segmentName, x.removed)); - if (x.added.length > 0 || x.removed.length > 0) { - results.push(segments.setChangeNumber(segmentName, x.till)); - changeNumber = 
x.till; - } - - log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processed ${segmentName} with till = ${x.till}. Added: ${x.added.length}. Removed: ${x.removed.length}`); + return fetchOnlyNew && since !== -1 ? + false : + segmentChangesFetcher(since, segmentName, noCache, till).then((changes) => { + return Promise.all(changes.map(x => { + log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processing ${segmentName} with till = ${x.till}. Added: ${x.added.length}. Removed: ${x.removed.length}`); + return segments.update(x.name, x.added, x.removed, x.till); + })).then((updates) => { + return updates.some(update => update); + }); }); - // If at least one storage operation result is a promise, join all in a single promise. - if (results.some(result => thenable(result))) return Promise.all(results).then(() => changeNumber); - return changeNumber; - }); }); } /** @@ -75,16 +63,12 @@ export function segmentChangesUpdaterFactory( let segmentsPromise = Promise.resolve(segmentName ? [segmentName] : segments.getRegisteredSegments()); return segmentsPromise.then(segmentNames => { - // Async fetchers are collected here. - const updaters: Promise[] = []; - - for (let index = 0; index < segmentNames.length; index++) { - updaters.push(updateSegment(segmentNames[index], noCache, till, fetchOnlyNew)); - } + // Async fetchers + const updaters = segmentNames.map(segmentName => updateSegment(segmentName, noCache, till, fetchOnlyNew)); return Promise.all(updaters).then(shouldUpdateFlags => { // if at least one segment fetch succeeded, mark segments ready - if (findIndex(shouldUpdateFlags, v => v !== -1) !== -1 || readyOnAlreadyExistentState) { + if (shouldUpdateFlags.some(update => update) || readyOnAlreadyExistentState) { readyOnAlreadyExistentState = false; if (readiness) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); } From 2fb780b6a03c586eefd741b34bded75b034b487e Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 19 Sep 2024 17:51:09 -0300 Subject: [PATCH 081/146] Remove addToSegment, removeFromSegment and setChangeNumber from ISegmentsCacheBase --- ...Sync.ts => AbstractMySegmentsCacheSync.ts} | 42 ++++++------------- .../inLocalStorage/MySegmentsCacheInLocal.ts | 10 ++--- .../__tests__/MySegmentsCacheInLocal.spec.ts | 6 ++- .../inMemory/MySegmentsCacheInMemory.ts | 10 ++--- .../inMemory/SegmentsCacheInMemory.ts | 38 +++-------------- src/storages/inMemory/SplitsCacheInMemory.ts | 1 - .../__tests__/SegmentsCacheInMemory.spec.ts | 20 ++++----- src/storages/inRedis/SegmentsCacheInRedis.ts | 26 ------------ .../__tests__/SegmentsCacheInRedis.spec.ts | 14 +++---- .../pluggable/SegmentsCachePluggable.ts | 41 ------------------ .../__tests__/SegmentsCachePluggable.spec.ts | 14 +++---- src/storages/types.ts | 9 ---- .../__tests__/SegmentsUpdateWorker.spec.ts | 2 +- 13 files changed, 52 insertions(+), 181 deletions(-) rename src/storages/{AbstractSegmentsCacheSync.ts => AbstractMySegmentsCacheSync.ts} (60%) diff --git a/src/storages/AbstractSegmentsCacheSync.ts b/src/storages/AbstractMySegmentsCacheSync.ts similarity index 60% rename from src/storages/AbstractSegmentsCacheSync.ts rename to src/storages/AbstractMySegmentsCacheSync.ts index ce609fa1..740b9644 100644 --- a/src/storages/AbstractSegmentsCacheSync.ts +++ b/src/storages/AbstractMySegmentsCacheSync.ts @@ -1,5 +1,3 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ -/* eslint-disable no-unused-vars */ import { IMySegmentsResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { ISegmentsCacheSync } from './types'; @@ -8,18 
+6,11 @@ import { ISegmentsCacheSync } from './types'; * This class provides a skeletal implementation of the ISegmentsCacheSync interface * to minimize the effort required to implement this interface. */ -export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { - /** - * For server-side synchronizer: add `segmentKeys` list of keys to `name` segment. - * For client-side synchronizer: add `name` segment to the cache. `segmentKeys` is undefined. - */ - abstract addToSegment(name: string, segmentKeys?: string[]): boolean +export abstract class AbstractMySegmentsCacheSync implements ISegmentsCacheSync { - /** - * For server-side synchronizer: remove `segmentKeys` list of keys from `name` segment. - * For client-side synchronizer: remove `name` segment from the cache. `segmentKeys` is undefined. - */ - abstract removeFromSegment(name: string, segmentKeys?: string[]): boolean + protected abstract addSegment(name: string): boolean + protected abstract removeSegment(name: string): boolean + protected abstract setChangeNumber(changeNumber?: number): boolean | void /** * For server-side synchronizer: check if `key` is in `name` segment. @@ -34,12 +25,10 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { this.resetSegments({}); } - /** - * For server-side synchronizer: add the given list of segments to the cache, with an empty list of keys. The segments that already exist are not modified. - * For client-side synchronizer: the method is not used. - */ - registerSegments(names: string[]): boolean { return false; } - update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number) { return false; } + + // No-op. Not used in client-side. + registerSegments(): boolean { return false; } + update() { return false; } /** * For server-side synchronizer: get the list of segments to fetch changes. @@ -53,11 +42,6 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { */ abstract getKeysCount(): number - /** - * For server-side synchronizer: change number of `name` segment. - * For client-side synchronizer: change number of mySegments. - */ - abstract setChangeNumber(name?: string, changeNumber?: number): boolean | void abstract getChangeNumber(name: string): number /** @@ -65,7 +49,7 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { * For client-side synchronizer: it resets or updates the cache. 
*/ resetSegments(segmentsData: MySegmentsData | IMySegmentsResponse): boolean { - this.setChangeNumber(undefined, segmentsData.cn); + this.setChangeNumber(segmentsData.cn); const { added, removed } = segmentsData as MySegmentsData; @@ -73,11 +57,11 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { let isDiff = false; added.forEach(segment => { - isDiff = this.addToSegment(segment) || isDiff; + isDiff = this.addSegment(segment) || isDiff; }); removed.forEach(segment => { - isDiff = this.removeFromSegment(segment) || isDiff; + isDiff = this.removeSegment(segment) || isDiff; }); return isDiff; @@ -98,11 +82,11 @@ export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { // Slowest path => add and/or remove segments for (let removeIndex = index; removeIndex < storedSegmentKeys.length; removeIndex++) { - this.removeFromSegment(storedSegmentKeys[removeIndex]); + this.removeSegment(storedSegmentKeys[removeIndex]); } for (let addIndex = index; addIndex < names.length; addIndex++) { - this.addToSegment(names[addIndex]); + this.addSegment(names[addIndex]); } return true; diff --git a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts index 7e01a906..8765db91 100644 --- a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts @@ -1,10 +1,10 @@ import { ILogger } from '../../logger/types'; import { isNaNNumber } from '../../utils/lang'; -import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync'; +import { AbstractMySegmentsCacheSync } from '../AbstractMySegmentsCacheSync'; import type { MySegmentsKeyBuilder } from '../KeyBuilderCS'; import { LOG_PREFIX, DEFINED } from './constants'; -export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { +export class MySegmentsCacheInLocal extends AbstractMySegmentsCacheSync { private readonly keys: MySegmentsKeyBuilder; private readonly log: ILogger; @@ -16,7 +16,7 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { // There is not need to flush segments cache like splits cache, since resetSegments receives the up-to-date list of active segments } - addToSegment(name: string): boolean { + protected addSegment(name: string): boolean { const segmentKey = this.keys.buildSegmentNameKey(name); try { @@ -29,7 +29,7 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { } } - removeFromSegment(name: string): boolean { + protected removeSegment(name: string): boolean { const segmentKey = this.keys.buildSegmentNameKey(name); try { @@ -81,7 +81,7 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { return 1; } - setChangeNumber(name?: string, changeNumber?: number) { + protected setChangeNumber(changeNumber?: number) { try { if (changeNumber) localStorage.setItem(this.keys.buildTillKey(), changeNumber + ''); else localStorage.removeItem(this.keys.buildTillKey()); diff --git a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts index aac52cac..813d4e7f 100644 --- a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts @@ -22,7 +22,11 @@ test('SEGMENT CACHE / in LocalStorage', () => { }); caches.forEach(cache => { - cache.removeFromSegment('mocked-segment'); + // @ts-expect-error + cache.resetSegments({ + added: [], + removed: 
['mocked-segment'] + }); expect(cache.isInSegment('mocked-segment')).toBe(false); expect(cache.getRegisteredSegments()).toEqual(['mocked-segment-2']); diff --git a/src/storages/inMemory/MySegmentsCacheInMemory.ts b/src/storages/inMemory/MySegmentsCacheInMemory.ts index 1e10c0a6..546a83c3 100644 --- a/src/storages/inMemory/MySegmentsCacheInMemory.ts +++ b/src/storages/inMemory/MySegmentsCacheInMemory.ts @@ -1,15 +1,15 @@ -import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync'; +import { AbstractMySegmentsCacheSync } from '../AbstractMySegmentsCacheSync'; /** * Default MySegmentsCacheInMemory implementation that stores MySegments in memory. * Supported by all JS runtimes. */ -export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { +export class MySegmentsCacheInMemory extends AbstractMySegmentsCacheSync { private segmentCache: Record = {}; private cn?: number; - addToSegment(name: string): boolean { + protected addSegment(name: string): boolean { if (this.segmentCache[name]) return false; this.segmentCache[name] = true; @@ -17,7 +17,7 @@ export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { return true; } - removeFromSegment(name: string): boolean { + protected removeSegment(name: string): boolean { if (!this.segmentCache[name]) return false; delete this.segmentCache[name]; @@ -30,7 +30,7 @@ export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { } - setChangeNumber(name?: string, changeNumber?: number) { + protected setChangeNumber(changeNumber?: number) { this.cn = changeNumber; } diff --git a/src/storages/inMemory/SegmentsCacheInMemory.ts b/src/storages/inMemory/SegmentsCacheInMemory.ts index 91635354..66cc9c3f 100644 --- a/src/storages/inMemory/SegmentsCacheInMemory.ts +++ b/src/storages/inMemory/SegmentsCacheInMemory.ts @@ -1,12 +1,11 @@ -import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync'; import { ISet, _Set } from '../../utils/lang/sets'; import { isIntegerNumber } from '../../utils/lang'; +import { ISegmentsCacheSync } from '../types'; /** - * Default ISplitsCacheSync implementation that stores split definitions in memory. - * Supported by all JS runtimes. + * Default ISplitsCacheSync implementation for server-side that stores segments definitions in memory. */ -export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { +export class SegmentsCacheInMemory implements ISegmentsCacheSync { private segmentCache: Record> = {}; private segmentChangeNumber: Record = {}; @@ -23,28 +22,6 @@ export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { return addedKeys.length > 0 || removedKeys.length > 0; } - addToSegment(name: string, segmentKeys: string[]): boolean { - const values = this.segmentCache[name]; - const keySet = values ? values : new _Set(); - - segmentKeys.forEach(k => keySet.add(k)); - - this.segmentCache[name] = keySet; - - return true; - } - - removeFromSegment(name: string, segmentKeys: string[]): boolean { - const values = this.segmentCache[name]; - const keySet = values ? 
values : new _Set(); - - segmentKeys.forEach(k => keySet.delete(k)); - - this.segmentCache[name] = keySet; - - return true; - } - isInSegment(name: string, key: string): boolean { const segmentValues = this.segmentCache[name]; @@ -86,16 +63,13 @@ export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { }, 0); } - setChangeNumber(name: string, changeNumber: number) { - this.segmentChangeNumber[name] = changeNumber; - - return true; - } - getChangeNumber(name: string) { const value = this.segmentChangeNumber[name]; return isIntegerNumber(value) ? value : -1; } + // No-op. Not used in server-side + resetSegments() { return false; } + } diff --git a/src/storages/inMemory/SplitsCacheInMemory.ts b/src/storages/inMemory/SplitsCacheInMemory.ts index 9294cc43..c1aa951b 100644 --- a/src/storages/inMemory/SplitsCacheInMemory.ts +++ b/src/storages/inMemory/SplitsCacheInMemory.ts @@ -5,7 +5,6 @@ import { ISet, _Set } from '../../utils/lang/sets'; /** * Default ISplitsCacheSync implementation that stores split definitions in memory. - * Supported by all JS runtimes. */ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { diff --git a/src/storages/inMemory/__tests__/SegmentsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/SegmentsCacheInMemory.spec.ts index e6713376..5ee2683c 100644 --- a/src/storages/inMemory/__tests__/SegmentsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/SegmentsCacheInMemory.spec.ts @@ -2,24 +2,18 @@ import { SegmentsCacheInMemory } from '../SegmentsCacheInMemory'; describe('SEGMENTS CACHE IN MEMORY', () => { - test('isInSegment, set/getChangeNumber, add/removeFromSegment, getKeysCount', () => { + test('isInSegment, getChangeNumber, update, getKeysCount', () => { const cache = new SegmentsCacheInMemory(); - cache.addToSegment('mocked-segment', [ - 'a', 'b', 'c' - ]); - - cache.setChangeNumber('mocked-segment', 1); - - cache.removeFromSegment('mocked-segment', ['d']); + cache.update('mocked-segment', [ 'a', 'b', 'c'], [], 1); + cache.update('mocked-segment', [], ['d'], 1); expect(cache.getChangeNumber('mocked-segment') === 1).toBe(true); - cache.addToSegment('mocked-segment', ['d', 'e']); + cache.update('mocked-segment', [ 'd', 'e'], [], 2); + cache.update('mocked-segment', [], ['a', 'c'], 2); - cache.removeFromSegment('mocked-segment', ['a', 'c']); - - expect(cache.getChangeNumber('mocked-segment') === 1).toBe(true); + expect(cache.getChangeNumber('mocked-segment') === 2).toBe(true); expect(cache.isInSegment('mocked-segment', 'a')).toBe(false); expect(cache.isInSegment('mocked-segment', 'b')).toBe(true); // b @@ -29,7 +23,7 @@ describe('SEGMENTS CACHE IN MEMORY', () => { // getKeysCount expect(cache.getKeysCount()).toBe(3); - cache.addToSegment('mocked-segment-2', ['a', 'b', 'c', 'd', 'e']); + cache.update('mocked-segment-2', ['a', 'b', 'c', 'd', 'e'], [], 2); expect(cache.getKeysCount()).toBe(8); cache.clear(); expect(cache.getKeysCount()).toBe(0); diff --git a/src/storages/inRedis/SegmentsCacheInRedis.ts b/src/storages/inRedis/SegmentsCacheInRedis.ts index 5537c823..42ed3b10 100644 --- a/src/storages/inRedis/SegmentsCacheInRedis.ts +++ b/src/storages/inRedis/SegmentsCacheInRedis.ts @@ -34,38 +34,12 @@ export class SegmentsCacheInRedis implements ISegmentsCacheAsync { }); } - addToSegment(name: string, segmentKeys: string[]) { - const segmentKey = this.keys.buildSegmentNameKey(name); - - if (segmentKeys.length) { - return this.redis.sadd(segmentKey, segmentKeys).then(() => true); - } else { - return Promise.resolve(true); - } - } - - 
removeFromSegment(name: string, segmentKeys: string[]) { - const segmentKey = this.keys.buildSegmentNameKey(name); - - if (segmentKeys.length) { - return this.redis.srem(segmentKey, segmentKeys).then(() => true); - } else { - return Promise.resolve(true); - } - } - isInSegment(name: string, key: string) { return this.redis.sismember( this.keys.buildSegmentNameKey(name), key ).then(matches => matches !== 0); } - setChangeNumber(name: string, changeNumber: number) { - return this.redis.set( - this.keys.buildSegmentTillKey(name), changeNumber + '' - ).then(status => status === 'OK'); - } - getChangeNumber(name: string) { return this.redis.get(this.keys.buildSegmentTillKey(name)).then((value: string | null) => { const i = parseInt(value as string, 10); diff --git a/src/storages/inRedis/__tests__/SegmentsCacheInRedis.spec.ts b/src/storages/inRedis/__tests__/SegmentsCacheInRedis.spec.ts index 6222af95..62799bab 100644 --- a/src/storages/inRedis/__tests__/SegmentsCacheInRedis.spec.ts +++ b/src/storages/inRedis/__tests__/SegmentsCacheInRedis.spec.ts @@ -9,25 +9,21 @@ const keys = new KeyBuilderSS(prefix, metadata); describe('SEGMENTS CACHE IN REDIS', () => { - test('isInSegment, set/getChangeNumber, add/removeFromSegment', async () => { + test('isInSegment, getChangeNumber, update', async () => { const connection = new RedisAdapter(loggerMock); const cache = new SegmentsCacheInRedis(loggerMock, keys, connection); - await cache.addToSegment('mocked-segment', ['a', 'b', 'c']); - - await cache.setChangeNumber('mocked-segment', 1); - - await cache.removeFromSegment('mocked-segment', ['d']); + await cache.update('mocked-segment', ['a', 'b', 'c'], ['d'], 1); expect(await cache.getChangeNumber('mocked-segment') === 1).toBe(true); expect(await cache.getChangeNumber('inexistent-segment')).toBe(-1); // -1 if the segment doesn't exist - await cache.addToSegment('mocked-segment', ['d', 'e']); + await cache.update('mocked-segment', ['d', 'e'], [], 2); - await cache.removeFromSegment('mocked-segment', ['a', 'c']); + await cache.update('mocked-segment', [], ['a', 'c'], 2); - expect(await cache.getChangeNumber('mocked-segment') === 1).toBe(true); + expect(await cache.getChangeNumber('mocked-segment') === 2).toBe(true); expect(await cache.isInSegment('mocked-segment', 'a')).toBe(false); expect(await cache.isInSegment('mocked-segment', 'b')).toBe(true); diff --git a/src/storages/pluggable/SegmentsCachePluggable.ts b/src/storages/pluggable/SegmentsCachePluggable.ts index 704b634c..05173938 100644 --- a/src/storages/pluggable/SegmentsCachePluggable.ts +++ b/src/storages/pluggable/SegmentsCachePluggable.ts @@ -39,36 +39,6 @@ export class SegmentsCachePluggable implements ISegmentsCacheAsync { }); } - /** - * Add a list of `segmentKeys` to the given segment `name`. - * The returned promise is resolved when the operation success - * or rejected if wrapper operation fails. - */ - addToSegment(name: string, segmentKeys: string[]) { - const segmentKey = this.keys.buildSegmentNameKey(name); - - if (segmentKeys.length) { - return this.wrapper.addItems(segmentKey, segmentKeys); - } else { - return Promise.resolve(); - } - } - - /** - * Remove a list of `segmentKeys` from the given segment `name`. - * The returned promise is resolved when the operation success - * or rejected if wrapper operation fails. 
- */ - removeFromSegment(name: string, segmentKeys: string[]) { - const segmentKey = this.keys.buildSegmentNameKey(name); - - if (segmentKeys.length) { - return this.wrapper.removeItems(segmentKey, segmentKeys); - } else { - return Promise.resolve(); - } - } - /** * Returns a promise that resolves with a boolean value indicating if `key` is part of `name` segment. * Promise can be rejected if wrapper operation fails. @@ -77,17 +47,6 @@ export class SegmentsCachePluggable implements ISegmentsCacheAsync { return this.wrapper.itemContains(this.keys.buildSegmentNameKey(name), key); } - /** - * Set till number for the given segment `name`. - * The returned promise is resolved when the operation success, - * or rejected if it fails (e.g., wrapper operation fails). - */ - setChangeNumber(name: string, changeNumber: number) { - return this.wrapper.set( - this.keys.buildSegmentTillKey(name), changeNumber + '' - ); - } - /** * Get till number or -1 if it's not defined. * The returned promise is resolved with the changeNumber or -1 if it doesn't exist or a wrapper operation fails. diff --git a/src/storages/pluggable/__tests__/SegmentsCachePluggable.spec.ts b/src/storages/pluggable/__tests__/SegmentsCachePluggable.spec.ts index 7fa1c537..eedb8f11 100644 --- a/src/storages/pluggable/__tests__/SegmentsCachePluggable.spec.ts +++ b/src/storages/pluggable/__tests__/SegmentsCachePluggable.spec.ts @@ -13,24 +13,20 @@ describe('SEGMENTS CACHE PLUGGABLE', () => { wrapperMock.mockClear(); }); - test('isInSegment, set/getChangeNumber, add/removeFromSegment', async () => { + test('isInSegment, getChangeNumber, update', async () => { const cache = new SegmentsCachePluggable(loggerMock, keyBuilder, wrapperMock); - await cache.addToSegment('mocked-segment', ['a', 'b', 'c']); - - await cache.setChangeNumber('mocked-segment', 1); - - await cache.removeFromSegment('mocked-segment', ['d']); + await cache.update('mocked-segment', ['a', 'b', 'c'], ['d'], 1); expect(await cache.getChangeNumber('mocked-segment') === 1).toBe(true); expect(await cache.getChangeNumber('inexistent-segment')).toBe(-1); // -1 if the segment doesn't exist - await cache.addToSegment('mocked-segment', ['d', 'e']); + await cache.update('mocked-segment', ['d', 'e'], [], 2); - await cache.removeFromSegment('mocked-segment', ['a', 'c']); + await cache.update('mocked-segment', [], ['a', 'c'], 2); - expect(await cache.getChangeNumber('mocked-segment') === 1).toBe(true); + expect(await cache.getChangeNumber('mocked-segment') === 2).toBe(true); expect(await cache.isInSegment('mocked-segment', 'a')).toBe(false); expect(await cache.isInSegment('mocked-segment', 'b')).toBe(true); diff --git a/src/storages/types.ts b/src/storages/types.ts index 41f569b4..1daa81fb 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -251,12 +251,9 @@ export interface ISplitsCacheAsync extends ISplitsCacheBase { /** Segments cache */ export interface ISegmentsCacheBase { - addToSegment(name: string, segmentKeys: string[]): MaybeThenable // different signature on Server and Client-Side - removeFromSegment(name: string, segmentKeys: string[]): MaybeThenable // different signature on Server and Client-Side isInSegment(name: string, key?: string): MaybeThenable // different signature on Server and Client-Side registerSegments(names: string[]): MaybeThenable // only for Server-Side getRegisteredSegments(): MaybeThenable // only for Server-Side - setChangeNumber(name: string, changeNumber: number): MaybeThenable // only for Server-Side getChangeNumber(name: string): 
MaybeThenable // only for Server-Side update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number): MaybeThenable // only for Server-Side clear(): MaybeThenable @@ -264,13 +261,10 @@ export interface ISegmentsCacheBase { // Same API for both variants: SegmentsCache and MySegmentsCache (client-side API) export interface ISegmentsCacheSync extends ISegmentsCacheBase { - addToSegment(name: string, segmentKeys?: string[]): boolean - removeFromSegment(name: string, segmentKeys?: string[]): boolean isInSegment(name: string, key?: string): boolean registerSegments(names: string[]): boolean getRegisteredSegments(): string[] getKeysCount(): number // only used for telemetry - setChangeNumber(name: string, changeNumber: number): boolean | void getChangeNumber(name?: string): number update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number): boolean // only for Server-Side resetSegments(segmentsData: MySegmentsData | IMySegmentsResponse): boolean // only for Sync Client-Side @@ -278,12 +272,9 @@ export interface ISegmentsCacheSync extends ISegmentsCacheBase { } export interface ISegmentsCacheAsync extends ISegmentsCacheBase { - addToSegment(name: string, segmentKeys: string[]): Promise - removeFromSegment(name: string, segmentKeys: string[]): Promise isInSegment(name: string, key: string): Promise registerSegments(names: string[]): Promise getRegisteredSegments(): Promise - setChangeNumber(name: string, changeNumber: number): Promise getChangeNumber(name: string): Promise update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number): Promise clear(): Promise diff --git a/src/sync/streaming/UpdateWorkers/__tests__/SegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/SegmentsUpdateWorker.spec.ts index 5f4068f2..31663fb5 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/SegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/SegmentsUpdateWorker.spec.ts @@ -12,7 +12,7 @@ function segmentsSyncTaskMock(segmentsStorage: SegmentsCacheInMemory, changeNumb function __resolveSegmentsUpdaterCall(changeNumber: Record) { Object.keys(changeNumber).forEach(segmentName => { - segmentsStorage.setChangeNumber(segmentName, changeNumber[segmentName]); // update changeNumber in storage + segmentsStorage.update(segmentName, [], [], changeNumber[segmentName]); // update changeNumber in storage }); if (__segmentsUpdaterCalls.length) __segmentsUpdaterCalls.shift().res(); // resolve `execute` call else changeNumbers.push(changeNumber); From 1b7a18ab201b0cb297ce959cbc7c1783e016300d Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 20 Sep 2024 10:54:06 -0300 Subject: [PATCH 082/146] Update changelog entry --- CHANGES.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index 61a0bfd9..15414d0c 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,5 +1,5 @@ 1.18.0 (September XX, 2024) - - Added `factory.destroy()` method, as a shortcut to destroy all SDK clients created by the factory. + - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. 1.17.0 (September 6, 2024) - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. 
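The reworded changelog entry above is the user-facing summary of patches 074 and 075: the factory now tracks every client it creates and exposes a destroy() shortcut that destroys them all. A rough usage sketch from a consumer's point of view follows; the SplitFactory import and the config values are placeholders for whichever client-side SDK package picks up this commons version, and are not part of this repository.

import { SplitFactory } from '@splitsoftware/splitio'; // hypothetical consumer build

const factory = SplitFactory({
  core: { authorizationKey: 'YOUR_SDK_KEY', key: 'user-1' }
});

const mainClient = factory.client();          // default client, bound to 'user-1'
const otherClient = factory.client('user-2'); // shared client

// Previously each client had to be destroyed individually:
//   Promise.all([mainClient.destroy(), otherClient.destroy()]);

// With this change, the factory does it in one call, resolving once every
// client created by this factory has flushed its data and been destroyed.
factory.destroy().then(() => {
  console.log('all clients destroyed');
});
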
From f6f245d77cc6a0c462e78e13ce3d3f39e3a974dc Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 20 Sep 2024 11:58:01 -0300 Subject: [PATCH 083/146] Update comment --- src/sdkClient/sdkClientMethod.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/sdkClient/sdkClientMethod.ts b/src/sdkClient/sdkClientMethod.ts index 3fe6f4fa..3860c5a7 100644 --- a/src/sdkClient/sdkClientMethod.ts +++ b/src/sdkClient/sdkClientMethod.ts @@ -4,12 +4,13 @@ import { RETRIEVE_CLIENT_DEFAULT } from '../logger/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; /** - * Factory of client method for server-side SDKs (ISDK and IAsyncSDK) + * Factory of client method for server-side SDKs */ export function sdkClientMethodFactory(params: ISdkFactoryContext): () => SplitIO.IClient | SplitIO.IAsyncClient { const log = params.settings.log; const clientInstance = sdkClientFactory(params); + // Only one client in server-side without bound key params.clients[''] = clientInstance; return function client() { From aed68a7847435e30cda62a25fe40e7f104019de0 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 20 Sep 2024 12:03:48 -0300 Subject: [PATCH 084/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 0e7168d3..8fc190d7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.0", + "version": "1.17.1-rc.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.0", + "version": "1.17.1-rc.1", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index ec638f10..883888e7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.0", + "version": "1.17.1-rc.1", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From d2fbd1bb48a513d59be452e3d97babb2b3113546 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 20 Sep 2024 13:36:10 -0300 Subject: [PATCH 085/146] Unit test for factory destroy --- src/sdkFactory/__tests__/index.spec.ts | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/src/sdkFactory/__tests__/index.spec.ts b/src/sdkFactory/__tests__/index.spec.ts index c7b51b2a..742abaff 100644 --- a/src/sdkFactory/__tests__/index.spec.ts +++ b/src/sdkFactory/__tests__/index.spec.ts @@ -6,7 +6,7 @@ import { EventEmitter } from '../../utils/MinEvents'; /** Mocks */ -const clientInstance = 'client'; +const clientInstance = { destroy: jest.fn() }; const managerInstance = 'manager'; const mockStorage = { splits: jest.fn(), @@ -30,7 +30,7 @@ jest.mock('../../trackers/telemetryTracker', () => { const paramsForAsyncSDK = { settings: fullSettings, storageFactory: jest.fn(() => mockStorage), - sdkClientMethodFactory: jest.fn(() => clientInstance), + sdkClientMethodFactory: jest.fn(({ clients }) => (key?: string) => { clients[key || ''] = clientInstance; return clientInstance; }), sdkManagerFactory: jest.fn(() => managerInstance), impressionsObserverFactory: jest.fn(), platform: { @@ -64,6 +64,7 @@ function assertSdkApi(sdk: SplitIO.IAsyncSDK | SplitIO.ISDK | SplitIO.ICsSDK, pa expect(sdk.settings).toBe(params.settings); expect(sdk.client).toBe(params.sdkClientMethodFactory.mock.results[0].value); 
expect(sdk.manager()).toBe(params.sdkManagerFactory.mock.results[0].value); + expect(sdk.destroy()).toBeDefined(); } function assertModulesCalled(params: any) { @@ -92,22 +93,18 @@ describe('sdkFactory', () => { afterEach(jest.clearAllMocks); - test('creates IAsyncSDK instance', () => { + test.each([paramsForAsyncSDK, fullParamsForSyncSDK])('creates SDK instance', async (params) => { - const sdk = sdkFactory(paramsForAsyncSDK as unknown as ISdkFactoryParams); + const sdk = sdkFactory(params as unknown as ISdkFactoryParams); // should return an object that conforms to SDK interface - assertSdkApi(sdk, paramsForAsyncSDK); + assertSdkApi(sdk, params); - assertModulesCalled(paramsForAsyncSDK); - }); - - test('creates ISDK instance', () => { - const sdk = sdkFactory(fullParamsForSyncSDK as unknown as ISdkFactoryParams); - - // should return an object that conforms to SDK interface - assertSdkApi(sdk, fullParamsForSyncSDK); + assertModulesCalled(params); - assertModulesCalled(fullParamsForSyncSDK); + // Factory destroy should call client destroy + expect(sdk.client()).toBe(clientInstance); + expect(await sdk.destroy()).toBeUndefined(); + expect(sdk.client().destroy).toBeCalledTimes(1); }); }); From 38092dc8247ea7aff08df579b8a3bf09135d9849 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 1 Oct 2024 11:10:08 -0300 Subject: [PATCH 086/146] Add onReadyFromCacheCb to storage factory params for code cleanup --- src/sdkFactory/index.ts | 5 ++++- src/storages/AbstractSplitsCacheAsync.ts | 8 -------- src/storages/AbstractSplitsCacheSync.ts | 8 -------- src/storages/inLocalStorage/SplitsCacheInLocal.ts | 11 +---------- .../__tests__/SplitsCacheInLocal.spec.ts | 6 ------ src/storages/inLocalStorage/index.ts | 8 ++++++-- src/storages/types.ts | 5 +---- src/sync/offline/syncTasks/fromObjectSyncTask.ts | 10 +++------- src/sync/polling/updaters/splitChangesUpdater.ts | 14 +++----------- .../storage/__tests__/storageCS.spec.ts | 7 +------ src/utils/settingsValidation/storage/storageCS.ts | 13 ------------- 11 files changed, 19 insertions(+), 76 deletions(-) diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 8b70cd95..ce7a0e9e 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -7,7 +7,7 @@ import { IBasicClient, SplitIO } from '../types'; import { validateAndTrackApiKey } from '../utils/inputValidation/apiKey'; import { createLoggerAPI } from '../logger/sdkLogger'; import { NEW_FACTORY, RETRIEVE_MANAGER } from '../logger/constants'; -import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; +import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../readiness/constants'; import { objectAssign } from '../utils/lang/objectAssign'; import { strategyDebugFactory } from '../trackers/strategy/strategyDebug'; import { strategyOptimizedFactory } from '../trackers/strategy/strategyOptimized'; @@ -46,6 +46,9 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
readiness.splits.emit(SDK_SPLITS_ARRIVED); readiness.segments.emit(SDK_SEGMENTS_ARRIVED); }, + onReadyFromCacheCb: () => { + readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); + } }); // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` const clients: Record = {}; diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts index 9e4e136c..6534c82d 100644 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ b/src/storages/AbstractSplitsCacheAsync.ts @@ -28,14 +28,6 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { return Promise.resolve(true); } - /** - * Check if the splits information is already stored in cache. - * Noop, just keeping the interface. This is used by client-side implementations only. - */ - checkCache(): Promise { - return Promise.resolve(false); - } - /** * Kill `name` split and set `defaultTreatment` and `changeNumber`. * Used for SPLIT_KILL push notifications. diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index ef44db40..c0e370ac 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -48,14 +48,6 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { abstract clear(): void - /** - * Check if the splits information is already stored in cache. This data can be preloaded. - * It is used as condition to emit SDK_SPLITS_CACHE_LOADED, and then SDK_READY_FROM_CACHE. - */ - checkCache(): boolean { - return false; - } - /** * Kill `name` split and set `defaultTreatment` and `changeNumber`. * Used for SPLIT_KILL push notifications. diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index ccd4859f..66d8869a 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -217,15 +217,6 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { } } - /** - * Check if the splits information is already stored in browser LocalStorage. - * In this function we could add more code to check if the data is valid. - * @override - */ - checkCache(): boolean { - return this.getChangeNumber() > -1; - } - /** * Clean Splits cache if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, * @@ -250,7 +241,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { this.updateNewFilter = true; // if there is cache, clear it - if (this.checkCache()) this.clear(); + if (this.getChangeNumber() > -1) this.clear(); } catch (e) { this.log.error(LOG_PREFIX + e); diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts index 732ca8b7..4205d7a6 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts @@ -31,16 +31,10 @@ test('SPLIT CACHE / LocalStorage', () => { expect(cache.getSplit('lol1')).toEqual(null); expect(cache.getSplit('lol2')).toEqual(somethingElse); - expect(cache.checkCache()).toBe(false); // checkCache should return false until localstorage has data. - expect(cache.getChangeNumber() === -1).toBe(true); - expect(cache.checkCache()).toBe(false); // checkCache should return false until localstorage has data. - cache.setChangeNumber(123); - expect(cache.checkCache()).toBe(true); // checkCache should return true once localstorage has data. 
- expect(cache.getChangeNumber() === 123).toBe(true); }); diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index 63c14f3b..f50f6cdd 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -12,7 +12,7 @@ import { SplitsCacheInMemory } from '../inMemory/SplitsCacheInMemory'; import { DEFAULT_CACHE_EXPIRATION_IN_MILLIS } from '../../utils/constants/browser'; import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; import { LOG_PREFIX } from './constants'; -import { DEBUG, NONE, STORAGE_LOCALSTORAGE } from '../../utils/constants'; +import { DEBUG, LOCALHOST_MODE, NONE, STORAGE_LOCALSTORAGE } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from '../inMemory/TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from '../inMemory/UniqueKeysCacheInMemoryCS'; import { getMatching } from '../../utils/key'; @@ -36,7 +36,7 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn return InMemoryStorageCSFactory(params); } - const { settings, settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; + const { onReadyFromCacheCb, settings, settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; const matchingKey = getMatching(settings.core.key); const keys = new KeyBuilderCS(prefix, matchingKey); const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; @@ -45,6 +45,10 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn const segments = new MySegmentsCacheInLocal(log, keys); const largeSegments = new MySegmentsCacheInLocal(log, myLargeSegmentsKeyBuilder(prefix, matchingKey)); + if (settings.mode === LOCALHOST_MODE || splits.getChangeNumber() > -1) { + Promise.resolve().then(onReadyFromCacheCb); + } + return { splits, segments, diff --git a/src/storages/types.ts b/src/storages/types.ts index b3b1076c..c376485d 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -208,8 +208,6 @@ export interface ISplitsCacheBase { // only for Client-Side. Returns true if the storage is not synchronized yet (getChangeNumber() === -1) or contains a FF using segments or large segments usesSegments(): MaybeThenable, clear(): MaybeThenable, - // should never reject or throw an exception. Instead return false by default, to avoid emitting SDK_READY_FROM_CACHE. 
- checkCache(): MaybeThenable, killLocally(name: string, defaultTreatment: string, changeNumber: number): MaybeThenable, getNamesByFlagSets(flagSets: string[]): MaybeThenable[]> } @@ -226,7 +224,6 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { trafficTypeExists(trafficType: string): boolean, usesSegments(): boolean, clear(): void, - checkCache(): boolean, killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean, getNamesByFlagSets(flagSets: string[]): ISet[] } @@ -243,7 +240,6 @@ export interface ISplitsCacheAsync extends ISplitsCacheBase { trafficTypeExists(trafficType: string): Promise, usesSegments(): Promise, clear(): Promise, - checkCache(): Promise, killLocally(name: string, defaultTreatment: string, changeNumber: number): Promise, getNamesByFlagSets(flagSets: string[]): Promise[]> } @@ -504,6 +500,7 @@ export interface IStorageFactoryParams { * It is meant for emitting SDK_READY event in consumer mode, and waiting before using the storage in the synchronizer. */ onReadyCb: (error?: any) => void, + onReadyFromCacheCb: (error?: any) => void, } export type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; diff --git a/src/sync/offline/syncTasks/fromObjectSyncTask.ts b/src/sync/offline/syncTasks/fromObjectSyncTask.ts index 84805110..d555552b 100644 --- a/src/sync/offline/syncTasks/fromObjectSyncTask.ts +++ b/src/sync/offline/syncTasks/fromObjectSyncTask.ts @@ -7,7 +7,7 @@ import { syncTaskFactory } from '../../syncTask'; import { ISyncTask } from '../../types'; import { ISettings } from '../../../types'; import { CONTROL } from '../../../utils/constants'; -import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../../../readiness/constants'; +import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { SYNC_OFFLINE_DATA, ERROR_SYNC_OFFLINE_LOADING } from '../../../logger/constants'; /** @@ -60,12 +60,8 @@ export function fromObjectUpdaterFactory( if (startingUp) { startingUp = false; - Promise.resolve(splitsCache.checkCache()).then(cacheReady => { - // Emits SDK_READY_FROM_CACHE - if (cacheReady) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); - // Emits SDK_READY - readiness.segments.emit(SDK_SEGMENTS_ARRIVED); - }); + // Emits SDK_READY + readiness.segments.emit(SDK_SEGMENTS_ARRIVED); } return true; }); diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/splitChangesUpdater.ts index 669a2010..29f023cf 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/splitChangesUpdater.ts @@ -4,7 +4,7 @@ import { ISplitChangesFetcher } from '../fetchers/types'; import { ISplit, ISplitChangesResponse, ISplitFiltersValidation } from '../../../dtos/types'; import { ISplitsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; -import { SDK_SPLITS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../../../readiness/constants'; +import { SDK_SPLITS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_SPLITS_FETCH, SYNC_SPLITS_NEW, SYNC_SPLITS_REMOVED, SYNC_SPLITS_SEGMENTS, SYNC_SPLITS_FETCH_FAILS, SYNC_SPLITS_FETCH_RETRY } from '../../../logger/constants'; import { startsWith } from '../../../utils/lang'; @@ -153,7 +153,8 @@ export function splitChangesUpdaterFactory( */ function _splitChangesUpdater(since: number, retry = 0): Promise { log.debug(SYNC_SPLITS_FETCH, [since]); - const fetcherPromise = 
Promise.resolve(splitUpdateNotification ? + + return Promise.resolve(splitUpdateNotification ? { splits: [splitUpdateNotification.payload], till: splitUpdateNotification.changeNumber } : splitChangesFetcher(since, noCache, till, _promiseDecorator) ) @@ -200,15 +201,6 @@ export function splitChangesUpdaterFactory( } return false; }); - - // After triggering the requests, if we have cached splits information let's notify that to emit SDK_READY_FROM_CACHE. - // Wrapping in a promise since checkCache can be async. - if (splitsEventEmitter && startingUp) { - Promise.resolve(splits.checkCache()).then(isCacheReady => { - if (isCacheReady) splitsEventEmitter.emit(SDK_SPLITS_CACHE_LOADED); - }); - } - return fetcherPromise; } let sincePromise = Promise.resolve(splits.getChangeNumber()); // `getChangeNumber` never rejects or throws error diff --git a/src/utils/settingsValidation/storage/__tests__/storageCS.spec.ts b/src/utils/settingsValidation/storage/__tests__/storageCS.spec.ts index 5bd7c389..88e078a0 100644 --- a/src/utils/settingsValidation/storage/__tests__/storageCS.spec.ts +++ b/src/utils/settingsValidation/storage/__tests__/storageCS.spec.ts @@ -1,4 +1,4 @@ -import { validateStorageCS, __InLocalStorageMockFactory } from '../storageCS'; +import { validateStorageCS } from '../storageCS'; import { InMemoryStorageCSFactory } from '../../../../storages/inMemory/InMemoryStorageCS'; import { loggerMock as log } from '../../../../logger/__tests__/sdkLogger.mock'; @@ -32,11 +32,6 @@ describe('storage validator for pluggable storage (client-side)', () => { expect(log.error).not.toBeCalled(); }); - test('fallbacks to mock InLocalStorage storage if the storage is InLocalStorage and the mode localhost', () => { - expect(validateStorageCS({ log, mode: 'localhost', storage: mockInLocalStorageFactory })).toBe(__InLocalStorageMockFactory); - expect(log.error).not.toBeCalled(); - }); - test('throws error if the provided storage factory is not compatible with the mode', () => { expect(() => { validateStorageCS({ log, mode: 'consumer', storage: mockInLocalStorageFactory }); }).toThrow('A PluggableStorage instance is required on consumer mode'); expect(() => { validateStorageCS({ log, mode: 'consumer_partial', storage: mockInLocalStorageFactory }); }).toThrow('A PluggableStorage instance is required on consumer mode'); diff --git a/src/utils/settingsValidation/storage/storageCS.ts b/src/utils/settingsValidation/storage/storageCS.ts index 097ce95d..43783630 100644 --- a/src/utils/settingsValidation/storage/storageCS.ts +++ b/src/utils/settingsValidation/storage/storageCS.ts @@ -3,14 +3,6 @@ import { ISettings, SDKMode } from '../../../types'; import { ILogger } from '../../../logger/types'; import { ERROR_STORAGE_INVALID } from '../../../logger/constants'; import { LOCALHOST_MODE, STANDALONE_MODE, STORAGE_PLUGGABLE, STORAGE_LOCALSTORAGE, STORAGE_MEMORY } from '../../../utils/constants'; -import { IStorageFactoryParams, IStorageSync } from '../../../storages/types'; - -export function __InLocalStorageMockFactory(params: IStorageFactoryParams): IStorageSync { - const result = InMemoryStorageCSFactory(params); - result.splits.checkCache = () => true; // to emit SDK_READY_FROM_CACHE - return result; -} -__InLocalStorageMockFactory.type = STORAGE_MEMORY; /** * This function validates `settings.storage` object @@ -30,11 +22,6 @@ export function validateStorageCS(settings: { log: ILogger, storage?: any, mode: log.error(ERROR_STORAGE_INVALID); } - // In localhost mode with InLocalStorage, fallback to a mock 
InLocalStorage to emit SDK_READY_FROM_CACHE - if (mode === LOCALHOST_MODE && storage.type === STORAGE_LOCALSTORAGE) { - return __InLocalStorageMockFactory; - } - if ([LOCALHOST_MODE, STANDALONE_MODE].indexOf(mode) === -1) { // Consumer modes require an async storage if (storage.type !== STORAGE_PLUGGABLE) throw new Error('A PluggableStorage instance is required on consumer mode'); From cf822b900b7fc174698f6b18b263018b805920c0 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 1 Oct 2024 14:48:50 -0300 Subject: [PATCH 087/146] Add changelog entry --- CHANGES.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES.txt b/CHANGES.txt index 1c55e907..c993161c 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,4 +1,5 @@ 2.0.0 (September XX, 2024) + - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow. - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. - Added support for targeting rules based on large segments. - BREAKING CHANGES: From 5f865f3388e92e218486fc18214cc302eb50cf4b Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 1 Oct 2024 14:54:21 -0300 Subject: [PATCH 088/146] Draft implementation of loadData and getSnapshot methods --- src/sdkFactory/index.ts | 2 +- src/storages/__tests__/dataLoader.spec.ts | 33 ++++++++ src/storages/dataLoader.ts | 95 ++++++++++++++-------- src/storages/inMemory/InMemoryStorageCS.ts | 23 +++++- src/storages/types.ts | 2 - src/types.ts | 13 +-- 6 files changed, 124 insertions(+), 44 deletions(-) create mode 100644 src/storages/__tests__/dataLoader.spec.ts diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index ce7a0e9e..666f0004 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -50,7 +50,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); } }); - // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` + const clients: Record = {}; const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now); const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker }); diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts new file mode 100644 index 00000000..fe779046 --- /dev/null +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -0,0 +1,33 @@ +import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; +import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; +import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; + +import * as dataLoader from '../dataLoader'; + +test('loadData & getSnapshot', () => { + jest.spyOn(dataLoader, 'loadData'); + const onReadyFromCacheCb = jest.fn(); + // @ts-expect-error + const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); + serverStorage.splits.setChangeNumber(123); // @ts-expect-error + serverStorage.splits.addSplits([['split1', { name: 'split1' }]]); + serverStorage.segments.addToSegment('segment1', [fullSettings.core.key as string]); + + const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); + + // @ts-expect-error + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb }); + + // Assert + expect(dataLoader.loadData).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + expect(dataLoader.getSnapshot(clientStorage, [fullSettings.core.key as string])).toEqual(preloadedData); + expect(preloadedData).toEqual({ + since: 123, + splitsData: { + split1: { name: 'split1' } + }, + mySegmentsData: { [fullSettings.core.key as string]: ['segment1'] }, + segmentsData: undefined + }); +}); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 24898d68..c8b34354 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,55 +1,86 @@ import { SplitIO } from '../types'; -import { DEFAULT_CACHE_EXPIRATION_IN_MILLIS } from '../utils/constants/browser'; -import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; +import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; +import { setToArray, ISet } from '../utils/lang/sets'; /** - * Factory of client-side storage loader + * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function + * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) * - * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader - * and extended with a `mySegmentsData` property. - * @returns function to preload the storage + * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader and extended with a `mySegmentsData` property. + * @param storage object containing `splits` and `segments` cache (client-side variant) + * @param userKey user key (matching key) of the provided MySegmentsCache + * + * @TODO extend to load largeSegments + * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. 
+ * @TODO add logs, and input validation in this module, in favor of size reduction. + * @TODO unit tests */ -export function dataLoaderFactory(preloadedData: SplitIO.PreloadedData): DataLoader { - - /** - * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function - * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) - * - * @param storage object containing `splits` and `segments` cache (client-side variant) - * @param userId user key string of the provided MySegmentsCache - * - * @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. - * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. - */ - return function loadData(storage: { splits: ISplitsCacheSync, segments: ISegmentsCacheSync }, userId: string) { - // Do not load data if current preloadedData is empty - if (Object.keys(preloadedData).length === 0) return; - - const { lastUpdated = -1, segmentsData = {}, since = -1, splitsData = {} } = preloadedData; +export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, userKey?: string) { + // Do not load data if current preloadedData is empty + if (Object.keys(preloadedData).length === 0) return; + + const { segmentsData = {}, since = -1, splitsData = {} } = preloadedData; + if (storage.splits) { const storedSince = storage.splits.getChangeNumber(); - const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; - // Do not load data if current localStorage data is more recent, - // or if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, - if (storedSince > since || lastUpdated < expirationTimestamp) return; + // Do not load data if current data is more recent + if (storedSince > since) return; // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data storage.splits.clear(); storage.splits.setChangeNumber(since); // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.splits.addSplits(Object.keys(splitsData).map(splitName => JSON.parse(splitsData[splitName]))); + storage.splits.addSplits(Object.keys(splitsData).map(splitName => ([splitName, splitsData[splitName]]))); + } - // add mySegments data - let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[userId]; + if (userKey) { // add mySegments data (client-side) + let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[userKey]; if (!mySegmentsData) { // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds mySegmentsData = Object.keys(segmentsData).filter(segmentName => { - const userIds = JSON.parse(segmentsData[segmentName]).added; - return Array.isArray(userIds) && userIds.indexOf(userId) > -1; + const userKeys = segmentsData[segmentName]; + return userKeys.indexOf(userKey) > -1; }); } storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); + } else { // add segments data (server-side) + Object.keys(segmentsData).filter(segmentName => { + const userKeys = segmentsData[segmentName]; + storage.segments.addToSegment(segmentName, userKeys); + }); + } +} + 
+export function getSnapshot(storage: IStorageSync, userKeys?: string[]): SplitIO.PreloadedData { + return { + // lastUpdated: Date.now(), + // @ts-ignore accessing private prop + since: storage.splits.changeNumber, // @ts-ignore accessing private prop + splitsData: storage.splits.splitsCache, + segmentsData: userKeys ? + undefined : // @ts-ignore accessing private prop + Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop + prev[cur] = setToArray(storage.segments.segmentCache[cur] as ISet); + return prev; + }, {}), + mySegmentsData: userKeys ? + userKeys.reduce((prev, userKey) => { + // @ts-ignore accessing private prop + prev[userKey] = storage.shared ? + // Client-side segments + // @ts-ignore accessing private prop + Object.keys(storage.shared(userKey).segments.segmentCache) : + // Server-side segments + // @ts-ignore accessing private prop + Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop + return storage.segments.segmentCache[segmentName].has(userKey) ? + prev.concat(segmentName) : + prev; + }, []); + return prev; + }, {}) : + undefined }; } diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 30667369..670b91f1 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -7,6 +7,8 @@ import { ImpressionCountsCacheInMemory } from './ImpressionCountsCacheInMemory'; import { DEBUG, LOCALHOST_MODE, NONE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; +import { getMatching } from '../../utils/key'; +import { loadData } from '../dataLoader'; /** * InMemory storage factory for standalone client-side SplitFactory @@ -14,7 +16,7 @@ import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; * @param params parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; + const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; const splits = new SplitsCacheInMemory(__splitFiltersValidation); const segments = new MySegmentsCacheInMemory(); @@ -42,11 +44,18 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag }, // When using shared instanciation with MEMORY we reuse everything but segments (they are unique per key) - shared() { + shared(matchingKey: string) { + const segments = new MySegmentsCacheInMemory(); + const largeSegments = new MySegmentsCacheInMemory(); + + if (preloadedData) { + loadData(preloadedData, { segments, largeSegments }, matchingKey); + } + return { splits: this.splits, - segments: new MySegmentsCacheInMemory(), - largeSegments: new MySegmentsCacheInMemory(), + segments, + largeSegments, impressions: this.impressions, impressionCounts: this.impressionCounts, events: this.events, @@ -72,6 +81,12 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag if (storage.uniqueKeys) storage.uniqueKeys.track = noopTrack; } + + if (preloadedData) { + loadData(preloadedData, storage, 
getMatching(params.settings.core.key)); + if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); + } + return storage; } diff --git a/src/storages/types.ts b/src/storages/types.ts index c376485d..f1091bf0 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -491,8 +491,6 @@ export interface IStorageAsync extends IStorageBase< /** StorageFactory */ -export type DataLoader = (storage: IStorageSync, matchingKey: string) => void - export interface IStorageFactoryParams { settings: ISettings, /** diff --git a/src/types.ts b/src/types.ts index 2a65b297..a2942a17 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,4 +1,4 @@ -import { ISplitFiltersValidation } from './dtos/types'; +import { ISplit, ISplitFiltersValidation } from './dtos/types'; import { IIntegration, IIntegrationFactoryParams } from './integrations/types'; import { ILogger } from './logger/types'; import { ISdkFactoryContext } from './sdkFactory/types'; @@ -98,6 +98,7 @@ export interface ISettings { eventsFirstPushWindow: number }, readonly storage: IStorageSyncFactory | IStorageAsyncFactory, + readonly preloadedData?: SplitIO.PreloadedData, readonly integrations: Array<{ readonly type: string, (params: IIntegrationFactoryParams): IIntegration | void @@ -771,7 +772,7 @@ export namespace SplitIO { * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. * @TODO configurable expiration time policy? */ - lastUpdated: number, + // lastUpdated: number, /** * Change number of the preloaded data. * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. @@ -779,13 +780,14 @@ export namespace SplitIO { since: number, /** * Map of feature flags to their stringified definitions. + * @TODO rename to flags */ splitsData: { - [splitName: string]: string + [splitName: string]: ISplit }, /** * Optional map of user keys to their list of segments. - * @TODO remove when releasing first version + * @TODO rename to memberships */ mySegmentsData?: { [key: string]: string[] @@ -793,9 +795,10 @@ export namespace SplitIO { /** * Optional map of segments to their stringified definitions. * This property is ignored if `mySegmentsData` was provided. 
+ * @TODO rename to segments */ segmentsData?: { - [segmentName: string]: string + [segmentName: string]: string[] }, } /** From 6ab2bc89d3667683b64951dcd71c81813d1c827c Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 1 Oct 2024 20:44:37 -0300 Subject: [PATCH 089/146] Polishing --- package-lock.json | 4 ++-- package.json | 2 +- src/storages/__tests__/dataLoader.spec.ts | 4 +--- src/storages/dataLoader.ts | 8 ++++---- src/types.ts | 6 ++---- 5 files changed, 10 insertions(+), 14 deletions(-) diff --git a/package-lock.json b/package-lock.json index 8fc190d7..6de71997 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.1", + "version": "1.17.1-rc.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.1", + "version": "1.17.1-rc.2", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 883888e7..dc7467df 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.1", + "version": "1.17.1-rc.2", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index fe779046..c9f77849 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -24,9 +24,7 @@ test('loadData & getSnapshot', () => { expect(dataLoader.getSnapshot(clientStorage, [fullSettings.core.key as string])).toEqual(preloadedData); expect(preloadedData).toEqual({ since: 123, - splitsData: { - split1: { name: 'split1' } - }, + splitsData: [{ name: 'split1' }], mySegmentsData: { [fullSettings.core.key as string]: ['segment1'] }, segmentsData: undefined }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index c8b34354..69b28274 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -19,7 +19,7 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits // Do not load data if current preloadedData is empty if (Object.keys(preloadedData).length === 0) return; - const { segmentsData = {}, since = -1, splitsData = {} } = preloadedData; + const { segmentsData = {}, since = -1, splitsData = [] } = preloadedData; if (storage.splits) { const storedSince = storage.splits.getChangeNumber(); @@ -32,7 +32,7 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits storage.splits.setChangeNumber(since); // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.splits.addSplits(Object.keys(splitsData).map(splitName => ([splitName, splitsData[splitName]]))); + storage.splits.addSplits(splitsData.map(split => ([split.name, split]))); } if (userKey) { // add mySegments data (client-side) @@ -57,8 +57,8 @@ export function getSnapshot(storage: IStorageSync, userKeys?: string[]): SplitIO return { // lastUpdated: Date.now(), // @ts-ignore accessing private prop - since: storage.splits.changeNumber, // @ts-ignore accessing private prop - splitsData: storage.splits.splitsCache, + since: storage.splits.changeNumber, + splitsData: storage.splits.getAll(), segmentsData: userKeys ? 
undefined : // @ts-ignore accessing private prop Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop diff --git a/src/types.ts b/src/types.ts index a2942a17..777b3258 100644 --- a/src/types.ts +++ b/src/types.ts @@ -779,12 +779,10 @@ export namespace SplitIO { */ since: number, /** - * Map of feature flags to their stringified definitions. + * List of feature flag definitions. * @TODO rename to flags */ - splitsData: { - [splitName: string]: ISplit - }, + splitsData: ISplit[], /** * Optional map of user keys to their list of segments. * @TODO rename to memberships From d653de654fc07da34297a1266329cc462bd97fd1 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 3 Oct 2024 10:13:50 -0300 Subject: [PATCH 090/146] Handle SplitKeyObject --- src/storages/dataLoader.ts | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 69b28274..7b44df91 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,6 +1,7 @@ import { SplitIO } from '../types'; import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; import { setToArray, ISet } from '../utils/lang/sets'; +import { getMatching } from '../utils/key'; /** * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function @@ -15,7 +16,7 @@ import { setToArray, ISet } from '../utils/lang/sets'; * @TODO add logs, and input validation in this module, in favor of size reduction. * @TODO unit tests */ -export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, userKey?: string) { +export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { // Do not load data if current preloadedData is empty if (Object.keys(preloadedData).length === 0) return; @@ -35,29 +36,28 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits storage.splits.addSplits(splitsData.map(split => ([split.name, split]))); } - if (userKey) { // add mySegments data (client-side) - let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[userKey]; + if (matchingKey) { // add mySegments data (client-side) + let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[matchingKey]; if (!mySegmentsData) { // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds mySegmentsData = Object.keys(segmentsData).filter(segmentName => { - const userKeys = segmentsData[segmentName]; - return userKeys.indexOf(userKey) > -1; + const matchingKeys = segmentsData[segmentName]; + return matchingKeys.indexOf(matchingKey) > -1; }); } storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); } else { // add segments data (server-side) Object.keys(segmentsData).filter(segmentName => { - const userKeys = segmentsData[segmentName]; - storage.segments.addToSegment(segmentName, userKeys); + const matchingKeys = segmentsData[segmentName]; + storage.segments.addToSegment(segmentName, matchingKeys); }); } } -export function getSnapshot(storage: IStorageSync, userKeys?: string[]): SplitIO.PreloadedData { +export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[]): 
SplitIO.PreloadedData { return { // lastUpdated: Date.now(), - // @ts-ignore accessing private prop - since: storage.splits.changeNumber, + since: storage.splits.getChangeNumber(), splitsData: storage.splits.getAll(), segmentsData: userKeys ? undefined : // @ts-ignore accessing private prop @@ -66,9 +66,8 @@ export function getSnapshot(storage: IStorageSync, userKeys?: string[]): SplitIO return prev; }, {}), mySegmentsData: userKeys ? - userKeys.reduce((prev, userKey) => { - // @ts-ignore accessing private prop - prev[userKey] = storage.shared ? + userKeys.reduce>((prev, userKey) => { + prev[getMatching(userKey)] = storage.shared ? // Client-side segments // @ts-ignore accessing private prop Object.keys(storage.shared(userKey).segments.segmentCache) : From bf74104bac2b06d986637c4c2e8a852b496e07d6 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 3 Oct 2024 12:18:37 -0300 Subject: [PATCH 091/146] Move SplitFactory side-effects into an init function, to expose for internal usage in React SDK --- .../__tests__/readinessManager.spec.ts | 4 +- src/readiness/readinessManager.ts | 16 +++-- src/readiness/types.ts | 1 + .../__tests__/sdkClientMethodCS.spec.ts | 9 +-- src/sdkClient/sdkClientMethodCS.ts | 7 +- src/sdkClient/sdkClientMethodCSWithTT.ts | 7 +- src/sdkFactory/index.ts | 42 ++++++++--- src/sdkFactory/types.ts | 3 + src/storages/inLocalStorage/index.ts | 10 +-- src/storages/inRedis/RedisAdapter.ts | 2 +- .../pluggable/__tests__/index.spec.ts | 4 ++ src/storages/pluggable/index.ts | 71 ++++++++++--------- src/storages/types.ts | 1 + src/trackers/__tests__/eventTracker.spec.ts | 8 ++- .../__tests__/impressionsTracker.spec.ts | 17 ++--- .../__tests__/uniqueKeysTracker.spec.ts | 2 + src/trackers/eventTracker.ts | 17 +++-- src/trackers/impressionsTracker.ts | 21 +++--- src/trackers/types.ts | 1 + src/trackers/uniqueKeysTracker.ts | 10 +-- 20 files changed, 158 insertions(+), 95 deletions(-) diff --git a/src/readiness/__tests__/readinessManager.spec.ts b/src/readiness/__tests__/readinessManager.spec.ts index 062ac941..e59a3cba 100644 --- a/src/readiness/__tests__/readinessManager.spec.ts +++ b/src/readiness/__tests__/readinessManager.spec.ts @@ -157,13 +157,14 @@ test('READINESS MANAGER / Segment updates should not be propagated', (done) => { }); }); -describe('READINESS MANAGER / Timeout ready event', () => { +describe('READINESS MANAGER / Timeout event', () => { let readinessManager: IReadinessManager; let timeoutCounter: number; beforeEach(() => { // Schedule timeout to be fired before SDK_READY readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); + readinessManager.init(); // Start the timeout timeoutCounter = 0; readinessManager.gate.on(SDK_READY_TIMED_OUT, () => { @@ -212,6 +213,7 @@ test('READINESS MANAGER / Cancel timeout if ready fired', (done) => { let sdkReadyTimedoutCalled = false; const readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); + readinessManager.init(); // Start the timeout readinessManager.gate.on(SDK_READY_TIMED_OUT, () => { sdkReadyTimedoutCalled = true; }); readinessManager.gate.once(SDK_READY, () => { sdkReadyCalled = true; }); diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index 82798182..00957df5 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -55,19 +55,15 @@ export function readinessManagerFactory( // emit SDK_READY_TIMED_OUT let hasTimedout = false; + let readyTimeoutId: ReturnType; - function timeout() { - if (hasTimedout) 
return; + function timeout() { // eslint-disable-next-line no-use-before-define + if (hasTimedout || isReady) return; hasTimedout = true; syncLastUpdate(); gate.emit(SDK_READY_TIMED_OUT, 'Split SDK emitted SDK_READY_TIMED_OUT event.'); } - let readyTimeoutId: ReturnType; - if (readyTimeout > 0) { - readyTimeoutId = setTimeout(timeout, readyTimeout); - } - // emit SDK_READY and SDK_UPDATE let isReady = false; splits.on(SDK_SPLITS_ARRIVED, checkIsReadyOrUpdate); @@ -132,6 +128,12 @@ export function readinessManagerFactory( // tracking and evaluations, while keeping event listeners to emit SDK_READY_TIMED_OUT event setDestroyed() { isDestroyed = true; }, + init() { + if (readyTimeout > 0) { + readyTimeoutId = setTimeout(timeout, readyTimeout); + } + }, + destroy() { isDestroyed = true; syncLastUpdate(); diff --git a/src/readiness/types.ts b/src/readiness/types.ts index 614070dc..6104eda5 100644 --- a/src/readiness/types.ts +++ b/src/readiness/types.ts @@ -59,6 +59,7 @@ export interface IReadinessManager { timeout(): void, setDestroyed(): void, destroy(): void, + init(): void, /** for client-side */ shared(): IReadinessManager, diff --git a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts index 1abb2a93..1ac052aa 100644 --- a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts @@ -14,15 +14,15 @@ const storageMock = { }) }; -const partialSdkReadinessManagers: { sdkStatus: jest.Mock, readinessManager: { destroy: jest.Mock } }[] = []; +const partialSdkReadinessManagers: { sdkStatus: jest.Mock, readinessManager: { init: jest.Mock, destroy: jest.Mock } }[] = []; const sdkReadinessManagerMock = { sdkStatus: jest.fn(), - readinessManager: { destroy: jest.fn() }, + readinessManager: { init: jest.fn(), destroy: jest.fn() }, shared: jest.fn(() => { partialSdkReadinessManagers.push({ sdkStatus: jest.fn(), - readinessManager: { destroy: jest.fn() }, + readinessManager: { init: jest.fn(), destroy: jest.fn() }, }); return partialSdkReadinessManagers[partialSdkReadinessManagers.length - 1]; }) @@ -46,7 +46,8 @@ const params = { signalListener: { stop: jest.fn() }, settings: settingsWithKey, telemetryTracker: telemetryTrackerFactory(), - clients: {} + clients: {}, + whenInit: (cb: () => void) => cb() }; const invalidAttributes = [ diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 87380d7b..1470728d 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -15,7 +15,7 @@ import { buildInstanceId } from './identity'; * Therefore, clients don't have a bound TT for the track method. 
*/ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.ICsClient { - const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log } } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log }, whenInit } = params; const mainClientInstance = clientCSDecorator( log, @@ -75,7 +75,10 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl validKey ); - sharedSyncManager && sharedSyncManager.start(); + whenInit(() => { + sharedSdkReadiness.readinessManager.init(); + sharedSyncManager && sharedSyncManager.start(); + }); log.info(NEW_SHARED_CLIENT); } else { diff --git a/src/sdkClient/sdkClientMethodCSWithTT.ts b/src/sdkClient/sdkClientMethodCSWithTT.ts index 9f577471..5ff0acfb 100644 --- a/src/sdkClient/sdkClientMethodCSWithTT.ts +++ b/src/sdkClient/sdkClientMethodCSWithTT.ts @@ -17,7 +17,7 @@ import { buildInstanceId } from './identity'; * (default client) or the client method (shared clients). */ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey, trafficType?: string) => SplitIO.ICsClient { - const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, log } } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, log }, whenInit } = params; const mainClientInstance = clientCSDecorator( log, @@ -86,7 +86,10 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl validTrafficType ); - sharedSyncManager && sharedSyncManager.start(); + whenInit(() => { + sharedSdkReadiness.readinessManager.init(); + sharedSyncManager && sharedSyncManager.start(); + }); log.info(NEW_SHARED_CLIENT); } else { diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 666f0004..e7f46836 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -23,14 +23,20 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. const { settings, platform, storageFactory, splitApiFactory, extraProps, syncManagerFactory, SignalListener, impressionsObserverFactory, integrationsManagerFactory, sdkManagerFactory, sdkClientMethodFactory, - filterAdapterFactory } = params; + filterAdapterFactory, isPure } = params; const { log, sync: { impressionsMode } } = settings; // @TODO handle non-recoverable errors, such as, global `fetch` not available, invalid SDK Key, etc. // On non-recoverable errors, we should mark the SDK as destroyed and not start synchronization. - // We will just log and allow for the SDK to end up throwing an SDK_TIMEOUT event for devs to handle. - validateAndTrackApiKey(log, settings.core.authorizationKey); + // initialization + let isInit = false; + const initCallbacks: (() => void)[] = []; + + function whenInit(cb: () => void) { + if (isInit) cb(); + else initCallbacks.push(cb); + } const sdkReadinessManager = sdkReadinessManagerFactory(platform.EventEmitter, settings); const readiness = sdkReadinessManager.readinessManager; @@ -70,13 +76,13 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
strategy = strategyDebugFactory(observer); } - const impressionsTracker = impressionsTrackerFactory(settings, storage.impressions, strategy, integrationsManager, storage.telemetry); - const eventTracker = eventTrackerFactory(settings, storage.events, integrationsManager, storage.telemetry); + const impressionsTracker = impressionsTrackerFactory(settings, storage.impressions, strategy, whenInit, integrationsManager, storage.telemetry); + const eventTracker = eventTrackerFactory(settings, storage.events, whenInit, integrationsManager, storage.telemetry); // splitApi is used by SyncManager and Browser signal listener const splitApi = splitApiFactory && splitApiFactory(settings, platform, telemetryTracker); - const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform }; + const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform, whenInit }; const syncManager = syncManagerFactory && syncManagerFactory(ctx as ISdkFactoryContextSync); ctx.syncManager = syncManager; @@ -88,8 +94,24 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. const clientMethod = sdkClientMethodFactory(ctx); const managerInstance = sdkManagerFactory(settings, storage.splits, sdkReadinessManager); - syncManager && syncManager.start(); - signalListener && signalListener.start(); + + function init() { + if (isInit) return; + isInit = true; + + // We will just log and allow for the SDK to end up throwing an SDK_TIMEOUT event for devs to handle. + validateAndTrackApiKey(log, settings.core.authorizationKey); + readiness.init(); + storage.init && storage.init(); + uniqueKeysTracker && uniqueKeysTracker.start(); + syncManager && syncManager.start(); + signalListener && signalListener.start(); + + initCallbacks.forEach((cb) => cb()); + initCallbacks.length = 0; + } + + if (!isPure) init(); log.info(NEW_FACTORY); @@ -110,7 +132,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
settings, destroy() { - return Promise.all(Object.keys(clients).map(key => clients[key].destroy())).then(() => {}); + return Promise.all(Object.keys(clients).map(key => clients[key].destroy())).then(() => { }); } - }, extraProps && extraProps(ctx)); + }, extraProps && extraProps(ctx), isPure && { init }); } diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts index b0a3b3f2..222592e5 100644 --- a/src/sdkFactory/types.ts +++ b/src/sdkFactory/types.ts @@ -50,6 +50,7 @@ export interface ISdkFactoryContext { splitApi?: ISplitApi syncManager?: ISyncManager, clients: Record, + whenInit(cb: () => void): void } export interface ISdkFactoryContextSync extends ISdkFactoryContext { @@ -68,6 +69,8 @@ export interface ISdkFactoryContextAsync extends ISdkFactoryContext { * Object parameter with the modules required to create an SDK factory instance */ export interface ISdkFactoryParams { + // If true, the `sdkFactory` is pure (no side effects), and the SDK instance includes a `init` method to run initialization side effects + isPure?: boolean, // The settings must be already validated settings: ISettings, diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index f50f6cdd..7360d4e3 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -45,10 +45,6 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn const segments = new MySegmentsCacheInLocal(log, keys); const largeSegments = new MySegmentsCacheInLocal(log, myLargeSegmentsKeyBuilder(prefix, matchingKey)); - if (settings.mode === LOCALHOST_MODE || splits.getChangeNumber() > -1) { - Promise.resolve().then(onReadyFromCacheCb); - } - return { splits, segments, @@ -59,6 +55,12 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn telemetry: shouldRecordTelemetry(params) ? new TelemetryCacheInMemory(splits, segments) : undefined, uniqueKeys: impressionsMode === NONE ? new UniqueKeysCacheInMemoryCS() : undefined, + init() { + if (settings.mode === LOCALHOST_MODE || splits.getChangeNumber() > -1) { + Promise.resolve().then(onReadyFromCacheCb); + } + }, + destroy() { this.splits = new SplitsCacheInMemory(__splitFiltersValidation); this.segments = new MySegmentsCacheInMemory(); diff --git a/src/storages/inRedis/RedisAdapter.ts b/src/storages/inRedis/RedisAdapter.ts index 6d738606..5a800ec5 100644 --- a/src/storages/inRedis/RedisAdapter.ts +++ b/src/storages/inRedis/RedisAdapter.ts @@ -20,7 +20,7 @@ const DEFAULT_OPTIONS = { const DEFAULT_LIBRARY_OPTIONS = { enableOfflineQueue: false, connectTimeout: DEFAULT_OPTIONS.connectionTimeout, - lazyConnect: false + lazyConnect: false // @TODO true to avoid side-effects on instantiation. 
}; interface IRedisCommand { diff --git a/src/storages/pluggable/__tests__/index.spec.ts b/src/storages/pluggable/__tests__/index.spec.ts index a0f32b1d..98f5622e 100644 --- a/src/storages/pluggable/__tests__/index.spec.ts +++ b/src/storages/pluggable/__tests__/index.spec.ts @@ -28,6 +28,7 @@ describe('PLUGGABLE STORAGE', () => { test('creates a storage instance', async () => { const storageFactory = PluggableStorage({ prefix, wrapper: wrapperMock }); const storage = storageFactory(internalSdkParams); + storage.init(); assertStorageInterface(storage); // the instance must implement the storage interface expect(wrapperMock.connect).toBeCalledTimes(1); // wrapper connect method should be called once when storage is created @@ -74,6 +75,7 @@ describe('PLUGGABLE STORAGE', () => { test('creates a storage instance for partial consumer mode (events and impressions cache in memory)', async () => { const storageFactory = PluggableStorage({ prefix, wrapper: wrapperMock }); const storage = storageFactory({ ...internalSdkParams, settings: { ...internalSdkParams.settings, mode: CONSUMER_PARTIAL_MODE } }); + storage.init(); assertStorageInterface(storage); expect(wrapperMock.connect).toBeCalledTimes(1); @@ -102,6 +104,7 @@ describe('PLUGGABLE STORAGE', () => { // Create storage instance. Wrapper is pollute but doesn't have filter query key, so it should clear the cache await new Promise(resolve => { storage = storageFactory({ onReadyCb: resolve, settings: { ...fullSettings, mode: undefined } }); + storage.init(); }); // Assert that expected caches are present @@ -121,6 +124,7 @@ describe('PLUGGABLE STORAGE', () => { // Create storage instance. This time the wrapper has the current filter query key, so it should not clear the cache await new Promise(resolve => { storage = storageFactory({ onReadyCb: resolve, settings: { ...fullSettings, mode: undefined } }); + storage.init(); }); // Assert that cache was not cleared diff --git a/src/storages/pluggable/index.ts b/src/storages/pluggable/index.ts index 60350d66..09bf2e45 100644 --- a/src/storages/pluggable/index.ts +++ b/src/storages/pluggable/index.ts @@ -1,4 +1,4 @@ -import { IPluggableStorageWrapper, IStorageAsync, IStorageAsyncFactory, IStorageFactoryParams, ITelemetryCacheAsync } from '../types'; +import { IPluggableStorageWrapper, IStorageAsyncFactory, IStorageFactoryParams, ITelemetryCacheAsync } from '../types'; import { KeyBuilderSS } from '../KeyBuilderSS'; import { SplitsCachePluggable } from './SplitsCachePluggable'; @@ -62,11 +62,12 @@ export function PluggableStorage(options: PluggableStorageOptions): IStorageAsyn const prefix = validatePrefix(options.prefix); - function PluggableStorageFactory(params: IStorageFactoryParams): IStorageAsync { + function PluggableStorageFactory(params: IStorageFactoryParams) { const { onReadyCb, settings, settings: { log, mode, sync: { impressionsMode }, scheduler: { impressionsQueueSize, eventsQueueSize } } } = params; const metadata = metadataBuilder(settings); const keys = new KeyBuilderSS(prefix, metadata); const wrapper = wrapperAdapter(log, options.wrapper); + let connectPromise: Promise; const isSyncronizer = mode === undefined; // If mode is not defined, the synchronizer is running const isPartialConsumer = mode === CONSUMER_PARTIAL_MODE; @@ -89,35 +90,6 @@ export function PluggableStorage(options: PluggableStorageOptions): IStorageAsyn new UniqueKeysCachePluggable(log, keys.buildUniqueKeysKey(), wrapper) : undefined; - // Connects to wrapper and emits SDK_READY event on main client - const 
connectPromise = wrapper.connect().then(() => { - if (isSyncronizer) { - // In standalone or producer mode, clear storage if SDK key or feature flag filter has changed - return wrapper.get(keys.buildHashKey()).then((hash) => { - const currentHash = getStorageHash(settings); - if (hash !== currentHash) { - log.info(LOG_PREFIX + 'Storage HASH has changed (SDK key, flags filter criteria or flags spec version was modified). Clearing cache'); - return wrapper.getKeysByPrefix(`${keys.prefix}.`).then(storageKeys => { - return Promise.all(storageKeys.map(storageKey => wrapper.del(storageKey))); - }).then(() => wrapper.set(keys.buildHashKey(), currentHash)); - } - }).then(() => { - onReadyCb(); - }); - } else { - // Start periodic flush of async storages if not running synchronizer (producer mode) - if (impressionCountsCache && (impressionCountsCache as ImpressionCountsCachePluggable).start) (impressionCountsCache as ImpressionCountsCachePluggable).start(); - if (uniqueKeysCache && (uniqueKeysCache as UniqueKeysCachePluggable).start) (uniqueKeysCache as UniqueKeysCachePluggable).start(); - if (telemetry && (telemetry as ITelemetryCacheAsync).recordConfig) (telemetry as ITelemetryCacheAsync).recordConfig(); - - onReadyCb(); - } - }).catch((e) => { - e = e || new Error('Error connecting wrapper'); - onReadyCb(e); - return e; // Propagate error for shared clients - }); - return { splits: new SplitsCachePluggable(log, keys, wrapper, settings.sync.__splitFiltersValidation), segments: new SegmentsCachePluggable(log, keys, wrapper), @@ -127,6 +99,39 @@ export function PluggableStorage(options: PluggableStorageOptions): IStorageAsyn telemetry, uniqueKeys: uniqueKeysCache, + init() { + if (connectPromise) return connectPromise; + + // Connects to wrapper and emits SDK_READY event on main client + return connectPromise = wrapper.connect().then(() => { + if (isSyncronizer) { + // In standalone or producer mode, clear storage if SDK key or feature flag filter has changed + return wrapper.get(keys.buildHashKey()).then((hash) => { + const currentHash = getStorageHash(settings); + if (hash !== currentHash) { + log.info(LOG_PREFIX + 'Storage HASH has changed (SDK key, flags filter criteria or flags spec version was modified). Clearing cache'); + return wrapper.getKeysByPrefix(`${keys.prefix}.`).then(storageKeys => { + return Promise.all(storageKeys.map(storageKey => wrapper.del(storageKey))); + }).then(() => wrapper.set(keys.buildHashKey(), currentHash)); + } + }).then(() => { + onReadyCb(); + }); + } else { + // Start periodic flush of async storages if not running synchronizer (producer mode) + if (impressionCountsCache && (impressionCountsCache as ImpressionCountsCachePluggable).start) (impressionCountsCache as ImpressionCountsCachePluggable).start(); + if (uniqueKeysCache && (uniqueKeysCache as UniqueKeysCachePluggable).start) (uniqueKeysCache as UniqueKeysCachePluggable).start(); + if (telemetry && (telemetry as ITelemetryCacheAsync).recordConfig) (telemetry as ITelemetryCacheAsync).recordConfig(); + + onReadyCb(); + } + }).catch((e) => { + e = e || new Error('Error connecting wrapper'); + onReadyCb(e); + return e; // Propagate error for shared clients + }); + }, + // Stop periodic flush and disconnect the underlying storage destroy() { return Promise.all(isSyncronizer ? 
[] : [ @@ -136,8 +141,8 @@ export function PluggableStorage(options: PluggableStorageOptions): IStorageAsyn }, // emits SDK_READY event on shared clients and returns a reference to the storage - shared(_, onReadyCb) { - connectPromise.then(onReadyCb); + shared(_: string, onReadyCb: (error?: any) => void) { + this.init().then(onReadyCb); return { ...this, diff --git a/src/storages/types.ts b/src/storages/types.ts index f1091bf0..21945587 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -462,6 +462,7 @@ export interface IStorageBase< events: TEventsCache, telemetry?: TTelemetryCache, uniqueKeys?: TUniqueKeysCache, + init?: () => void | Promise, destroy(): void | Promise, shared?: (matchingKey: string, onReadyCb: (error?: any) => void) => this } diff --git a/src/trackers/__tests__/eventTracker.spec.ts b/src/trackers/__tests__/eventTracker.spec.ts index 18dba632..ec0e3f17 100644 --- a/src/trackers/__tests__/eventTracker.spec.ts +++ b/src/trackers/__tests__/eventTracker.spec.ts @@ -29,13 +29,15 @@ const fakeEvent = { } }; +const fakeWhenInit = (cb: () => void) => cb(); + /* Tests */ describe('Event Tracker', () => { test('Tracker API', () => { expect(typeof eventTrackerFactory).toBe('function'); // The module should return a function which acts as a factory. - const instance = eventTrackerFactory(fullSettings, fakeEventsCache, fakeIntegrationsManager); + const instance = eventTrackerFactory(fullSettings, fakeEventsCache, fakeWhenInit, fakeIntegrationsManager); expect(typeof instance.track).toBe('function'); // The instance should implement the track method. }); @@ -51,7 +53,7 @@ describe('Event Tracker', () => { } }); // @ts-ignore - const tracker = eventTrackerFactory(fullSettings, fakeEventsCache, fakeIntegrationsManager, fakeTelemetryCache); + const tracker = eventTrackerFactory(fullSettings, fakeEventsCache, fakeWhenInit, fakeIntegrationsManager, fakeTelemetryCache); const result1 = tracker.track(fakeEvent, 1); expect(fakeEventsCache.track.mock.calls[0]).toEqual([fakeEvent, 1]); // Should be present in the event cache. @@ -92,7 +94,7 @@ describe('Event Tracker', () => { const settings = { ...fullSettings }; const fakeEventsCache = { track: jest.fn(() => true) }; - const tracker = eventTrackerFactory(settings, fakeEventsCache); + const tracker = eventTrackerFactory(settings, fakeEventsCache, fakeWhenInit); expect(tracker.track(fakeEvent)).toBe(true); expect(fakeEventsCache.track).toBeCalledTimes(1); // event should be tracked if userConsent is undefined diff --git a/src/trackers/__tests__/impressionsTracker.spec.ts b/src/trackers/__tests__/impressionsTracker.spec.ts index d5c40ee7..06f14fc2 100644 --- a/src/trackers/__tests__/impressionsTracker.spec.ts +++ b/src/trackers/__tests__/impressionsTracker.spec.ts @@ -34,6 +34,7 @@ const fakeSettingsWithListener = { ...fakeSettings, impressionListener: fakeListener }; +const fakeWhenInit = (cb: () => void) => cb(); /* Tests */ @@ -50,12 +51,12 @@ describe('Impressions Tracker', () => { test('Tracker API', () => { expect(typeof impressionsTrackerFactory).toBe('function'); // The module should return a function which acts as a factory. - const instance = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategy); + const instance = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategy, fakeWhenInit); expect(typeof instance.track).toBe('function'); // The instance should implement the track method which will actually track queued impressions. 
}); test('Should be able to track impressions (in DEBUG mode without Previous Time).', () => { - const tracker = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategy); + const tracker = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategy, fakeWhenInit); const imp1 = { feature: '10', @@ -75,7 +76,7 @@ describe('Impressions Tracker', () => { }); test('Tracked impressions should be sent to impression listener and integration manager when we invoke .track()', (done) => { - const tracker = impressionsTrackerFactory(fakeSettingsWithListener, fakeImpressionsCache, strategy, fakeIntegrationsManager); + const tracker = impressionsTrackerFactory(fakeSettingsWithListener, fakeImpressionsCache, strategy, fakeWhenInit, fakeIntegrationsManager); const fakeImpression = { feature: 'impression' @@ -149,8 +150,8 @@ describe('Impressions Tracker', () => { impression3.time = 1234567891; const trackers = [ - impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyDebugFactory(impressionObserverSSFactory()), undefined), - impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyDebugFactory(impressionObserverCSFactory()), undefined) + impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyDebugFactory(impressionObserverSSFactory()), fakeWhenInit, undefined), + impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyDebugFactory(impressionObserverCSFactory()), fakeWhenInit, undefined) ]; expect(fakeImpressionsCache.track).not.toBeCalled(); // storage method should not be called until impressions are tracked. @@ -176,8 +177,8 @@ describe('Impressions Tracker', () => { impression2.time = Date.now(); impression3.time = Date.now(); - const impressionCountsCache = new ImpressionCountsCacheInMemory(); // @ts-ignore - const tracker = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyOptimizedFactory(impressionObserverCSFactory(), impressionCountsCache), undefined, fakeTelemetryCache); + const impressionCountsCache = new ImpressionCountsCacheInMemory(); + const tracker = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyOptimizedFactory(impressionObserverCSFactory(), impressionCountsCache), fakeWhenInit, undefined, fakeTelemetryCache as any); expect(fakeImpressionsCache.track).not.toBeCalled(); // cache method should not be called by just creating a tracker @@ -200,7 +201,7 @@ describe('Impressions Tracker', () => { test('Should track or not impressions depending on user consent status', () => { const settings = { ...fullSettings }; - const tracker = impressionsTrackerFactory(settings, fakeImpressionsCache, strategy); + const tracker = impressionsTrackerFactory(settings, fakeImpressionsCache, strategy, fakeWhenInit); tracker.track([impression]); expect(fakeImpressionsCache.track).toBeCalledTimes(1); // impression should be tracked if userConsent is undefined diff --git a/src/trackers/__tests__/uniqueKeysTracker.spec.ts b/src/trackers/__tests__/uniqueKeysTracker.spec.ts index e9dab3c5..1e577d4e 100644 --- a/src/trackers/__tests__/uniqueKeysTracker.spec.ts +++ b/src/trackers/__tests__/uniqueKeysTracker.spec.ts @@ -48,6 +48,8 @@ describe('Unique keys tracker', () => { const uniqueKeysTrackerWithRefresh = uniqueKeysTrackerFactory(loggerMock, fakeUniqueKeysCache, fakeFilter); + uniqueKeysTrackerWithRefresh.start(); + setTimeout(() => { expect(fakeFilter.clear).toBeCalledTimes(1); diff --git a/src/trackers/eventTracker.ts b/src/trackers/eventTracker.ts index cebf3950..18b1e94c 100644 --- 
a/src/trackers/eventTracker.ts +++ b/src/trackers/eventTracker.ts @@ -16,6 +16,7 @@ import { isConsumerMode } from '../utils/settingsValidation/mode'; export function eventTrackerFactory( settings: ISettings, eventsCache: IEventsCacheBase, + whenInit: (cb: () => void) => void, integrationsManager?: IEventsHandler, telemetryCache?: ITelemetryCacheSync | ITelemetryCacheAsync ): IEventTracker { @@ -32,13 +33,15 @@ export function eventTrackerFactory( log.info(EVENTS_TRACKER_SUCCESS, [msg]); if (integrationsManager) { // Wrap in a timeout because we don't want it to be blocking. - setTimeout(function () { - // copy of event, to avoid unexpected behaviour if modified by integrations - const eventDataCopy = objectAssign({}, eventData); - if (properties) eventDataCopy.properties = objectAssign({}, properties); - // integrationsManager does not throw errors (they are internally handled by each integration module) - integrationsManager.handleEvent(eventDataCopy); - }, 0); + whenInit(() => { + setTimeout(() => { + // copy of event, to avoid unexpected behaviour if modified by integrations + const eventDataCopy = objectAssign({}, eventData); + if (properties) eventDataCopy.properties = objectAssign({}, properties); + // integrationsManager does not throw errors (they are internally handled by each integration module) + integrationsManager.handleEvent(eventDataCopy); + }); + }); } } else { log.error(ERROR_EVENTS_TRACKER, [msg]); diff --git a/src/trackers/impressionsTracker.ts b/src/trackers/impressionsTracker.ts index 77b5f1f1..d8a3fbc0 100644 --- a/src/trackers/impressionsTracker.ts +++ b/src/trackers/impressionsTracker.ts @@ -19,6 +19,7 @@ export function impressionsTrackerFactory( settings: ISettings, impressionsCache: IImpressionsCacheBase, strategy: IStrategy, + whenInit: (cb: () => void) => void, integrationsManager?: IImpressionsHandler, telemetryCache?: ITelemetryCacheSync | ITelemetryCacheAsync, ): IImpressionsTracker { @@ -67,16 +68,18 @@ export function impressionsTrackerFactory( }; // Wrap in a timeout because we don't want it to be blocking. - setTimeout(function () { - // integrationsManager.handleImpression does not throw errors - if (integrationsManager) integrationsManager.handleImpression(impressionData); + whenInit(() => { + setTimeout(() => { + // integrationsManager.handleImpression does not throw errors + if (integrationsManager) integrationsManager.handleImpression(impressionData); - try { // @ts-ignore. An exception on the listeners should not break the SDK. - if (impressionListener) impressionListener.logImpression(impressionData); - } catch (err) { - log.error(ERROR_IMPRESSIONS_LISTENER, [err]); - } - }, 0); + try { // @ts-ignore. An exception on the listeners should not break the SDK. 
+ if (impressionListener) impressionListener.logImpression(impressionData); + } catch (err) { + log.error(ERROR_IMPRESSIONS_LISTENER, [err]); + } + }); + }); } } } diff --git a/src/trackers/types.ts b/src/trackers/types.ts index 041c9b97..dfb01c5e 100644 --- a/src/trackers/types.ts +++ b/src/trackers/types.ts @@ -65,6 +65,7 @@ export interface IImpressionSenderAdapter { /** Unique keys tracker */ export interface IUniqueKeysTracker { + start(): void; stop(): void; track(key: string, featureName: string): void; } diff --git a/src/trackers/uniqueKeysTracker.ts b/src/trackers/uniqueKeysTracker.ts index fe367c79..9faa57fc 100644 --- a/src/trackers/uniqueKeysTracker.ts +++ b/src/trackers/uniqueKeysTracker.ts @@ -25,10 +25,6 @@ export function uniqueKeysTrackerFactory( ): IUniqueKeysTracker { let intervalId: any; - if (filterAdapter.refreshRate) { - intervalId = setInterval(filterAdapter.clear, filterAdapter.refreshRate); - } - return { track(key: string, featureName: string): void { @@ -39,6 +35,12 @@ export function uniqueKeysTrackerFactory( uniqueKeysCache.track(key, featureName); }, + start(): void { + if (filterAdapter.refreshRate) { + intervalId = setInterval(filterAdapter.clear, filterAdapter.refreshRate); + } + }, + stop(): void { clearInterval(intervalId); } From 37d05ebc42e508613006b997caabad16c5f705b2 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 3 Oct 2024 12:19:06 -0300 Subject: [PATCH 092/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6de71997..de54ab44 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.2", + "version": "1.17.1-rc.3", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.2", + "version": "1.17.1-rc.3", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index dc7467df..f09e09f7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.2", + "version": "1.17.1-rc.3", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 7b7feea4ae0386ad0ca38b4a985b941be5d4e142 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 4 Oct 2024 14:52:56 -0300 Subject: [PATCH 093/146] rename isPure to lazyInit --- src/sdkFactory/index.ts | 6 ++---- src/sdkFactory/types.ts | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index e7f46836..0951abd3 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -23,7 +23,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. const { settings, platform, storageFactory, splitApiFactory, extraProps, syncManagerFactory, SignalListener, impressionsObserverFactory, integrationsManagerFactory, sdkManagerFactory, sdkClientMethodFactory, - filterAdapterFactory, isPure } = params; + filterAdapterFactory, lazyInit } = params; const { log, sync: { impressionsMode } } = settings; // @TODO handle non-recoverable errors, such as, global `fetch` not available, invalid SDK Key, etc. @@ -111,8 +111,6 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
     initCallbacks.length = 0;
   }
 
-  if (!isPure) init();
-
   log.info(NEW_FACTORY);
 
   // @ts-ignore
@@ -134,5 +132,5 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO.
     destroy() {
       return Promise.all(Object.keys(clients).map(key => clients[key].destroy())).then(() => { });
     }
-  }, extraProps && extraProps(ctx), isPure && { init });
+  }, extraProps && extraProps(ctx), lazyInit ? { init } : init());
 }
diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts
index 222592e5..774110c5 100644
--- a/src/sdkFactory/types.ts
+++ b/src/sdkFactory/types.ts
@@ -70,7 +70,7 @@ export interface ISdkFactoryContextAsync extends ISdkFactoryContext {
  */
 export interface ISdkFactoryParams {
 
   // If true, the `sdkFactory` is pure (no side effects), and the SDK instance includes a `init` method to run initialization side effects
-  isPure?: boolean,
+  lazyInit?: boolean,
   // The settings must be already validated
   settings: ISettings,

From 9176f67a003e0bc590cc1a73fc7457dfafed1dde Mon Sep 17 00:00:00 2001
From: Emiliano Sanchez
Date: Fri, 4 Oct 2024 15:03:45 -0300
Subject: [PATCH 094/146] Bugfix on server-side polling manager

---
 CHANGES.txt                          | 3 ++-
 src/__tests__/testUtils/fetchMock.ts | 3 ++-
 src/sync/polling/pollingManagerSS.ts | 5 ++---
 3 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/CHANGES.txt b/CHANGES.txt
index 15414d0c..c8955b92 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,5 +1,6 @@
-1.18.0 (September XX, 2024)
+1.18.0 (October XX, 2024)
  - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory.
+ - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready.
 
 1.17.0 (September 6, 2024)
  - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks.
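For context on the `lazyInit` flag introduced in PATCH 093 above and wired through the patches that follow: when `lazyInit` is true the factory performs no initialization side effects up front and instead exposes an `init` method for the wrapper (for example, the React SDK mentioned in the changelog) to call later; otherwise `init()` runs during factory creation. The sketch below only illustrates that pattern; it is a hypothetical, self-contained example, and names such as `buildSdk` and `SdkLike` are not part of the codebase.

// Hypothetical sketch of the lazy-init pattern (names are illustrative, not from the patch set).
type SdkLike = { destroy(): Promise<void>, init?: () => void };

function buildSdk(lazyInit?: boolean): SdkLike {
  let initialized = false;

  function init() {
    if (initialized) return;
    initialized = true;
    // The real factory would start readiness timers, the sync manager, telemetry, etc. here.
  }

  const sdk: SdkLike = {
    destroy() {
      // The real SDK stops timers and flushes pending data here.
      return Promise.resolve();
    }
  };

  // Mirrors `lazyInit ? { init } : init()` in sdkFactory: expose `init` when lazy, run it right away otherwise.
  return lazyInit ? Object.assign(sdk, { init }) : (init(), sdk);
}

// Eager (default) usage: side effects start at creation time.
const eagerSdk = buildSdk();
eagerSdk.destroy();

// Lazy usage (e.g., wrappers like the React SDK): defer side effects until needed.
const lazySdk = buildSdk(true);
if (lazySdk.init) lazySdk.init();
lazySdk.destroy();

The same deferral shows up throughout this series: storages gain an `init()` method, trackers receive a `whenInit` callback, and the readiness manager delays its ready-timeout until `init()` runs.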
diff --git a/src/__tests__/testUtils/fetchMock.ts b/src/__tests__/testUtils/fetchMock.ts
index 94a614f7..8b86df27 100644
--- a/src/__tests__/testUtils/fetchMock.ts
+++ b/src/__tests__/testUtils/fetchMock.ts
@@ -1,4 +1,5 @@
-// http://www.wheresrhys.co.uk/fetch-mock/#usageinstallation
+// @TODO upgrade fetch-mock when fetch-mock-jest vulnerabilities are fixed
+// https://www.wheresrhys.co.uk/fetch-mock/docs/fetch-mock/Usage/cheatsheet#local-fetch-with-jest
 import fetchMockLib from 'fetch-mock';
 
 const fetchMock = fetchMockLib.sandbox();
diff --git a/src/sync/polling/pollingManagerSS.ts b/src/sync/polling/pollingManagerSS.ts
index 90f252a4..cea57dfe 100644
--- a/src/sync/polling/pollingManagerSS.ts
+++ b/src/sync/polling/pollingManagerSS.ts
@@ -1,7 +1,6 @@
 import { splitsSyncTaskFactory } from './syncTasks/splitsSyncTask';
 import { segmentsSyncTaskFactory } from './syncTasks/segmentsSyncTask';
 import { IPollingManager, ISegmentsSyncTask, ISplitsSyncTask } from './types';
-import { thenable } from '../../utils/promise/thenable';
 import { POLLING_START, POLLING_STOP, LOG_PREFIX_SYNC_POLLING } from '../../logger/constants';
 import { ISdkFactoryContextSync } from '../../sdkFactory/types';
@@ -29,9 +28,9 @@ export function pollingManagerSSFactory(
     log.debug(LOG_PREFIX_SYNC_POLLING + `Segments will be refreshed each ${settings.scheduler.segmentsRefreshRate} millis`);
 
     const startingUp = splitsSyncTask.start();
-    if (thenable(startingUp)) {
+    if (startingUp) {
       startingUp.then(() => {
-        segmentsSyncTask.start();
+        if (splitsSyncTask.isRunning()) segmentsSyncTask.start();
       });
     }
   },

From 5eac7b13fd57713f95ef5df76df148509a5a29bf Mon Sep 17 00:00:00 2001
From: Emiliano Sanchez
Date: Fri, 4 Oct 2024 15:29:21 -0300
Subject: [PATCH 095/146] changelog entry

---
 CHANGES.txt | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CHANGES.txt b/CHANGES.txt
index c7372ef1..98fefdc6 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,6 +1,8 @@
 2.0.0 (October XX, 2024)
  - Added support for targeting rules based on large segments.
  - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory.
+ - Added `factory.getState()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage.
+ - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan.
  - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow.
  - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready.
  - BREAKING CHANGES:

From fcdeb1b55c4114b27b2eb4fb5adad72871644ebd Mon Sep 17 00:00:00 2001
From: Emiliano Sanchez
Date: Fri, 4 Oct 2024 15:33:35 -0300
Subject: [PATCH 096/146] changelog entry

---
 CHANGES.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGES.txt b/CHANGES.txt
index 98fefdc6..0ab19c90 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -4,6 +4,7 @@
  - Added `factory.getState()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage.
  - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan.
  - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow.
+ - Updated the handling of timers and async operations by moving them into an `init` factory method to enable lazy initialization of the SDK. This update is intended for the React SDK. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. - BREAKING CHANGES: - Updated default flag spec version to 1.2. From bdf69e8831604da4845fcc4dd4c4432929b86a2e Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Sat, 5 Oct 2024 02:42:55 -0300 Subject: [PATCH 097/146] Fix duplicated /memberships fetch of non-default clients when using lazy init --- src/readiness/readinessManager.ts | 15 +++++--- src/readiness/types.ts | 2 ++ src/sdkClient/sdkClientMethodCS.ts | 7 +--- src/sdkClient/sdkClientMethodCSWithTT.ts | 7 +--- src/sync/__tests__/syncManagerOnline.spec.ts | 6 ---- src/sync/syncManagerOnline.ts | 36 +++++++++++--------- src/sync/types.ts | 2 +- 7 files changed, 35 insertions(+), 40 deletions(-) diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index 00957df5..bc31152b 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -7,6 +7,8 @@ function splitsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISpli const splitsEventEmitter = objectAssign(new EventEmitter(), { splitsArrived: false, splitsCacheLoaded: false, + initialized: false, + initCallbacks: [] }); // `isSplitKill` condition avoids an edge-case of wrongly emitting SDK_READY if: @@ -55,7 +57,6 @@ export function readinessManagerFactory( // emit SDK_READY_TIMED_OUT let hasTimedout = false; - let readyTimeoutId: ReturnType; function timeout() { // eslint-disable-next-line no-use-before-define if (hasTimedout || isReady) return; @@ -64,6 +65,12 @@ export function readinessManagerFactory( gate.emit(SDK_READY_TIMED_OUT, 'Split SDK emitted SDK_READY_TIMED_OUT event.'); } + let readyTimeoutId: ReturnType; + if (readyTimeout > 0) { + if (splits.initialized) readyTimeoutId = setTimeout(timeout, readyTimeout); + else splits.initCallbacks.push(() => { readyTimeoutId = setTimeout(timeout, readyTimeout); }); + } + // emit SDK_READY and SDK_UPDATE let isReady = false; splits.on(SDK_SPLITS_ARRIVED, checkIsReadyOrUpdate); @@ -129,9 +136,9 @@ export function readinessManagerFactory( setDestroyed() { isDestroyed = true; }, init() { - if (readyTimeout > 0) { - readyTimeoutId = setTimeout(timeout, readyTimeout); - } + if (splits.initialized) return; + splits.initialized = true; + splits.initCallbacks.forEach(cb => cb()); }, destroy() { diff --git a/src/readiness/types.ts b/src/readiness/types.ts index 6104eda5..ebe22b0f 100644 --- a/src/readiness/types.ts +++ b/src/readiness/types.ts @@ -12,6 +12,8 @@ export interface ISplitsEventEmitter extends IEventEmitter { once(event: ISplitsEvent, listener: (...args: any[]) => void): this; splitsArrived: boolean splitsCacheLoaded: boolean + initialized: boolean, + initCallbacks: (() => void)[] } /** Segments data emitter */ diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 1470728d..58f1c570 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -15,7 +15,7 @@ import { buildInstanceId } from './identity'; * Therefore, clients don't have a bound TT for the track method. 
*/ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.ICsClient { - const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log }, whenInit } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log } } = params; const mainClientInstance = clientCSDecorator( log, @@ -75,11 +75,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl validKey ); - whenInit(() => { - sharedSdkReadiness.readinessManager.init(); - sharedSyncManager && sharedSyncManager.start(); - }); - log.info(NEW_SHARED_CLIENT); } else { log.debug(RETRIEVE_CLIENT_EXISTING); diff --git a/src/sdkClient/sdkClientMethodCSWithTT.ts b/src/sdkClient/sdkClientMethodCSWithTT.ts index 5ff0acfb..33284cf5 100644 --- a/src/sdkClient/sdkClientMethodCSWithTT.ts +++ b/src/sdkClient/sdkClientMethodCSWithTT.ts @@ -17,7 +17,7 @@ import { buildInstanceId } from './identity'; * (default client) or the client method (shared clients). */ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey, trafficType?: string) => SplitIO.ICsClient { - const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, log }, whenInit } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, log } } = params; const mainClientInstance = clientCSDecorator( log, @@ -86,11 +86,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl validTrafficType ); - whenInit(() => { - sharedSdkReadiness.readinessManager.init(); - sharedSyncManager && sharedSyncManager.start(); - }); - log.info(NEW_SHARED_CLIENT); } else { log.debug(RETRIEVE_CLIENT_EXISTING); diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index a83568ef..7fda853b 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -125,12 +125,9 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', () if (!pollingSyncManagerShared) throw new Error('pollingSyncManagerShared should exist'); - pollingSyncManagerShared.start(); - expect(pollingManagerMock.start).not.toBeCalled(); pollingSyncManagerShared.stop(); - pollingSyncManagerShared.start(); expect(pollingManagerMock.start).not.toBeCalled(); @@ -153,12 +150,9 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', () if (!pushingSyncManagerShared) throw new Error('pushingSyncManagerShared should exist'); - pushingSyncManagerShared.start(); - expect(pollingManagerMock.start).not.toBeCalled(); pushingSyncManagerShared.stop(); - pushingSyncManagerShared.start(); expect(pollingManagerMock.start).not.toBeCalled(); diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index b6407630..b3c4aad7 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -143,27 +143,29 @@ export function syncManagerOnlineFactory( const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).add(matchingKey, readinessManager, storage); - return { - isRunning: mySegmentsSyncTask.isRunning, - start() { - if (syncEnabled) { - if (pushManager) { - if (pollingManager!.isRunning()) { - // if doing polling, we must start the periodic fetch of data - if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); - } else { - // if not polling, we must execute the sync task for the initial 
fetch - // of segments since `syncAll` was already executed when starting the main client - mySegmentsSyncTask.execute(); - } - pushManager.add(matchingKey, mySegmentsSyncTask); - } else { + if (running) { + if (syncEnabled) { + if (pushManager) { + if (pollingManager!.isRunning()) { + // if doing polling, we must start the periodic fetch of data if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); + } else { + // if not polling, we must execute the sync task for the initial fetch + // of segments since `syncAll` was already executed when starting the main client + mySegmentsSyncTask.execute(); } + pushManager.add(matchingKey, mySegmentsSyncTask); } else { - if (!readinessManager.isReady()) mySegmentsSyncTask.execute(); + if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); } - }, + } else { + if (!readinessManager.isReady()) mySegmentsSyncTask.execute(); + } + } + + return { + isRunning: mySegmentsSyncTask.isRunning, + stop() { // check in case `client.destroy()` has been invoked more than once for the same client const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).get(matchingKey); diff --git a/src/sync/types.ts b/src/sync/types.ts index 81727ca9..956d3112 100644 --- a/src/sync/types.ts +++ b/src/sync/types.ts @@ -44,5 +44,5 @@ export interface ISyncManager extends ITask { } export interface ISyncManagerCS extends ISyncManager { - shared(matchingKey: string, readinessManager: IReadinessManager, storage: IStorageSync): ISyncManager | undefined + shared(matchingKey: string, readinessManager: IReadinessManager, storage: IStorageSync): Pick | undefined } From 7b6e43371f347ee8ab730818406aa2f804ded54f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 4 Oct 2024 17:15:28 -0300 Subject: [PATCH 098/146] Removed the deprecated GOOGLE_ANALYTICS_TO_SPLIT and SPLIT_TO_GOOGLE_ANALYTICS integrations --- CHANGES.txt | 3 +- src/integrations/__tests__/browser.spec.ts | 98 ------ src/integrations/browser.ts | 35 -- src/integrations/ga/GaToSplit.ts | 299 ------------------ src/integrations/ga/GoogleAnalyticsToSplit.ts | 14 - src/integrations/ga/SplitToGa.ts | 135 -------- src/integrations/ga/SplitToGoogleAnalytics.ts | 14 - .../ga/__tests__/GaToSplit.spec.ts | 295 ----------------- .../ga/__tests__/SplitToGa.spec.ts | 195 ------------ src/integrations/ga/__tests__/gaMock.ts | 60 ---- src/integrations/ga/autoRequire.js | 33 -- src/integrations/ga/types.ts | 153 --------- src/utils/constants/browser.ts | 4 - .../integrations/__tests__/plugable.spec.ts | 4 +- 14 files changed, 4 insertions(+), 1338 deletions(-) delete mode 100644 src/integrations/__tests__/browser.spec.ts delete mode 100644 src/integrations/browser.ts delete mode 100644 src/integrations/ga/GaToSplit.ts delete mode 100644 src/integrations/ga/GoogleAnalyticsToSplit.ts delete mode 100644 src/integrations/ga/SplitToGa.ts delete mode 100644 src/integrations/ga/SplitToGoogleAnalytics.ts delete mode 100644 src/integrations/ga/__tests__/GaToSplit.spec.ts delete mode 100644 src/integrations/ga/__tests__/SplitToGa.spec.ts delete mode 100644 src/integrations/ga/__tests__/gaMock.ts delete mode 100644 src/integrations/ga/autoRequire.js delete mode 100644 src/integrations/ga/types.ts diff --git a/CHANGES.txt b/CHANGES.txt index 8b3e1c91..9ec1a0e7 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,8 +1,9 @@ -2.0.0 (September XX, 2024) +2.0.0 (October XX, 2024) - Added support for targeting rules based on large segments. 
- Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. - BREAKING CHANGES: + - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. - Updated default flag spec version to 1.2. - Removed `/mySegments` endpoint from SplitAPI module, as it is replaced by `/memberships` endpoint. - Removed support for MY_SEGMENTS_UPDATE and MY_SEGMENTS_UPDATE_V2 notification types, as they are replaced by MEMBERSHIPS_MS_UPDATE and MEMBERSHIPS_LS_UPDATE notification types. diff --git a/src/integrations/__tests__/browser.spec.ts b/src/integrations/__tests__/browser.spec.ts deleted file mode 100644 index 40316654..00000000 --- a/src/integrations/__tests__/browser.spec.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { GOOGLE_ANALYTICS_TO_SPLIT, SPLIT_TO_GOOGLE_ANALYTICS } from '../../utils/constants/browser'; -import { SPLIT_IMPRESSION, SPLIT_EVENT } from '../../utils/constants'; -import { IIntegrationManager } from '../types'; -import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; - -// Mock integration modules (GaToSplit and SplitToGa). - -jest.mock('../ga/GaToSplit'); -import { GaToSplit as GaToSplitMock } from '../ga/GaToSplit'; -jest.mock('../ga/SplitToGa'); -import { SplitToGa as SplitToGaMock } from '../ga/SplitToGa'; - -const SplitToGaQueueMethod = jest.fn(); -(SplitToGaMock as unknown as jest.Mock).mockImplementation(() => { - return { - queue: SplitToGaQueueMethod - }; -}); - - -const fakeParams = { - storage: 'fakeStorage', - settings: { - core: 'fakeCore', - log: loggerMock - } -}; - -function clearMocks() { - (GaToSplitMock as jest.Mock).mockClear(); - (SplitToGaMock as unknown as jest.Mock).mockClear(); - SplitToGaQueueMethod.mockClear(); -} - -// Test target -import { integrationsManagerFactory as browserIMF } from '../browser'; -import { BrowserIntegration } from '../ga/types'; - -describe('IntegrationsManagerFactory for browser', () => { - - test('API', () => { - expect(typeof browserIMF).toBe('function'); // The module should return a function which acts as a factory. - - // @ts-expect-error - const instance1 = browserIMF([]); - expect(instance1).toBe(undefined); // The instance should be undefined if settings.integrations does not contain integrations that register a listener. - - let integrations: BrowserIntegration[] = [{ type: GOOGLE_ANALYTICS_TO_SPLIT }, { type: SPLIT_TO_GOOGLE_ANALYTICS }]; - const instance2 = browserIMF(integrations, fakeParams as any) as IIntegrationManager; - expect(GaToSplitMock).toBeCalledTimes(1); // GaToSplit invoked once - expect(SplitToGaMock).toBeCalledTimes(1); // SplitToGa invoked once - expect(typeof instance2.handleImpression).toBe('function'); // The instance should implement the handleImpression method if settings.integrations has items that register a listener. - expect(typeof instance2.handleEvent).toBe('function'); // The instance should implement the handleEvent method if settings.integrations has items that register a listener. 
- - clearMocks(); - - integrations = [{ type: GOOGLE_ANALYTICS_TO_SPLIT }, { type: SPLIT_TO_GOOGLE_ANALYTICS }, { type: GOOGLE_ANALYTICS_TO_SPLIT }, { type: SPLIT_TO_GOOGLE_ANALYTICS }, { type: SPLIT_TO_GOOGLE_ANALYTICS }]; - browserIMF(integrations, fakeParams as any); - expect(GaToSplitMock).toBeCalledTimes(2); // GaToSplit invoked twice - expect(SplitToGaMock).toBeCalledTimes(3); // SplitToGa invoked thrice - - clearMocks(); - }); - - test('Interaction with GaToSplit integration module', () => { - const integrations: BrowserIntegration[] = [{ - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - prefix: 'some-prefix' - }]; - browserIMF(integrations, fakeParams as any); - - expect((GaToSplitMock as jest.Mock).mock.calls).toEqual([[integrations[0], fakeParams]]); // Invokes GaToSplit integration module with options, storage and core settings - - clearMocks(); - }); - - test('Interaction with SplitToGa integration module', () => { - const integrations: BrowserIntegration[] = [{ - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - events: true - }]; - const instance = browserIMF(integrations, fakeParams as any); - - expect((SplitToGaMock as unknown as jest.Mock).mock.calls).toEqual([[fakeParams.settings.log, integrations[0]]]); // Invokes SplitToGa integration module with options - - const fakeImpression = 'fake'; // @ts-expect-error - instance.handleImpression(fakeImpression); - expect(SplitToGaQueueMethod.mock.calls).toEqual([[{ payload: fakeImpression, type: SPLIT_IMPRESSION }]]); // Invokes SplitToGa.queue method with tracked impression - - clearMocks(); - - const fakeEvent = 'fake'; // @ts-expect-error - instance.handleEvent(fakeEvent); - expect(SplitToGaQueueMethod.mock.calls).toEqual([[{ payload: fakeEvent, type: SPLIT_EVENT }]]); // Invokes SplitToGa.queue method with tracked event - - clearMocks(); - }); -}); diff --git a/src/integrations/browser.ts b/src/integrations/browser.ts deleted file mode 100644 index d4ad8de8..00000000 --- a/src/integrations/browser.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { GOOGLE_ANALYTICS_TO_SPLIT, SPLIT_TO_GOOGLE_ANALYTICS } from '../utils/constants/browser'; -import { IIntegration, IIntegrationManager, IIntegrationFactoryParams } from './types'; -import { BrowserIntegration } from './ga/types'; -import { pluggableIntegrationsManagerFactory } from './pluggable'; -import { GoogleAnalyticsToSplit } from './ga/GoogleAnalyticsToSplit'; -import { SplitToGoogleAnalytics } from './ga/SplitToGoogleAnalytics'; - -/** - * IntegrationsManager factory for the browser variant of the isomorphic JS SDK. - * The integrations manager instantiates integration modules, and bypass tracked events and impressions to them. - * - * @param integrations valid integration settings object for browser sdk - * @param params information of the Sdk factory instance that integrations can access to - * - * @returns integration manager or undefined if `integrations` are not present in settings. 
- */ -export function integrationsManagerFactory( - integrations: BrowserIntegration[], - params: IIntegrationFactoryParams -): IIntegrationManager | undefined { - - // maps integration config items into integration factories to reuse the pluggable integration manager - const integrationFactories: Array<(params: IIntegrationFactoryParams) => IIntegration | void> = integrations - .map(integrationOptions => { - switch (integrationOptions.type) { - case GOOGLE_ANALYTICS_TO_SPLIT: return GoogleAnalyticsToSplit(integrationOptions); - case SPLIT_TO_GOOGLE_ANALYTICS: return SplitToGoogleAnalytics(integrationOptions); - } - }) - .filter(integrationFactory => { - return integrationFactory && typeof integrationFactory === 'function'; - }); - - return pluggableIntegrationsManagerFactory(integrationFactories, params); -} diff --git a/src/integrations/ga/GaToSplit.ts b/src/integrations/ga/GaToSplit.ts deleted file mode 100644 index a996625f..00000000 --- a/src/integrations/ga/GaToSplit.ts +++ /dev/null @@ -1,299 +0,0 @@ -import { objectAssign } from '../../utils/lang/objectAssign'; -import { isString, isFiniteNumber, uniqAsStrings } from '../../utils/lang'; -import { - validateEvent, - validateEventValue, - validateEventProperties, - validateKey, - validateTrafficType -} from '../../utils/inputValidation'; -import { SplitIO } from '../../types'; -import { Identity, GoogleAnalyticsToSplitOptions } from './types'; -import { ILogger } from '../../logger/types'; -import { IIntegrationFactoryParams } from '../types'; -import { ITelemetryTracker } from '../../trackers/types'; - -const logPrefix = 'ga-to-split: '; -const logNameMapper = 'ga-to-split:mapper'; - -/** - * Provides a plugin to use with analytics.js, accounting for the possibility - * that the global command queue has been renamed or not yet defined. - * @param window Reference to global object. - * @param pluginName The plugin name identifier. - * @param pluginConstructor The plugin constructor function. - * @param log Logger instance. - * @param autoRequire If true, log error when auto-require script is not detected - */ -function providePlugin(window: any, pluginName: string, pluginConstructor: Function, log: ILogger, autoRequire: boolean, telemetryTracker?: ITelemetryTracker) { - // get reference to global command queue. Init it if not defined yet. - const gaAlias = window.GoogleAnalyticsObject || 'ga'; - window[gaAlias] = window[gaAlias] || function () { - (window[gaAlias].q = window[gaAlias].q || []).push(arguments); - }; - - // provides the plugin for use with analytics.js. - window[gaAlias]('provide', pluginName, pluginConstructor); - - const hasAutoRequire = window[gaAlias].q && window[gaAlias].q.push !== [].push; - if (autoRequire && !hasAutoRequire) { // Expecting spy on ga.q push method but not found - log.error(logPrefix + 'integration is configured to autorequire the splitTracker plugin, but the necessary script does not seem to have run. 
Please check the docs.'); - } - if (telemetryTracker && hasAutoRequire) { - telemetryTracker.addTag('integration:ga-autorequire'); - } -} - -// Default mapping: object used for building the default mapper from hits to Split events -const defaultMapping = { - eventTypeId: { - event: 'eventAction', - social: 'socialAction', - }, - eventValue: { - event: 'eventValue', - timing: 'timingValue', - }, - eventProperties: { - pageview: ['page'], - screenview: ['screenName'], - event: ['eventCategory', 'eventLabel'], - social: ['socialNetwork', 'socialTarget'], - timing: ['timingCategory', 'timingVar', 'timingLabel'], - exception: ['exDescription', 'exFatal'], - } -}; - -/** - * Build a mapper function based on a mapping object - * - * @param {object} mapping - */ -function mapperBuilder(mapping: typeof defaultMapping) { - return function (model: UniversalAnalytics.Model): SplitIO.EventData { - const hitType: string = model.get('hitType'); - // @ts-expect-error - const eventTypeId = model.get(mapping.eventTypeId[hitType] || 'hitType'); - // @ts-expect-error - const value = model.get(mapping.eventValue[hitType]); - - const properties: Record = {}; // @ts-expect-error - const fields: string[] = mapping.eventProperties[hitType]; - if (fields) { - for (let i = 0; i < fields.length; i++) { - const fieldValue = model.get(fields[i]); - if (fieldValue !== undefined) properties[fields[i]] = fieldValue; - } - } - - return { - eventTypeId, - value, - properties, - timestamp: Date.now(), - }; - }; -} - -// exposed for unit testing purposses. -export const defaultMapper = mapperBuilder(defaultMapping); - -export const defaultPrefix = 'ga'; - -/** - * Return a new list of identities removing invalid and duplicated ones. - * - * @param {Array} identities list of identities - * @returns list of valid and unique identities. The list might be empty if `identities` is not an array or all its elements are invalid. - */ -export function validateIdentities(identities?: Identity[]) { - if (!Array.isArray(identities)) - return []; - - // Remove duplicated identities - const uniqueIdentities = uniqAsStrings(identities); - - // Filter based on rum-agent identities validator - return uniqueIdentities.filter(identity => { - if (!identity) return false; - - const maybeKey = identity.key; - const maybeTT = identity.trafficType; - - if (!isString(maybeKey) && !isFiniteNumber(maybeKey)) - return false; - if (!isString(maybeTT)) - return false; - - return true; - }); -} - -/** - * Checks if EventData fields (except EventTypeId) are valid, and logs corresponding warnings. - * EventTypeId is validated separately. - * - * @param {EventData} data event data instance to validate. Precondition: data != undefined - * @returns {boolean} Whether the data instance is a valid EventData or not. 
- */ -export function validateEventData(log: ILogger, eventData: any): eventData is SplitIO.EventData { - if (!validateEvent(log, eventData.eventTypeId, logNameMapper)) - return false; - - if (validateEventValue(log, eventData.value, logNameMapper) === false) - return false; - - const { properties } = validateEventProperties(log, eventData.properties, logNameMapper); - if (properties === false) - return false; - - if (eventData.timestamp && !isFiniteNumber(eventData.timestamp)) - return false; - - if (eventData.key && validateKey(log, eventData.key, logNameMapper) === false) - return false; - - if (eventData.trafficTypeName && validateTrafficType(log, eventData.trafficTypeName, logNameMapper) === false) - return false; - - return true; -} - -const INVALID_PREFIX_REGEX = /^[^a-zA-Z0-9]+/; -const INVALID_SUBSTRING_REGEX = /[^-_.:a-zA-Z0-9]+/g; -/** - * Fixes the passed string value to comply with EventTypeId format, by removing invalid characters and truncating if necessary. - * - * @param {object} log factory logger - * @param {string} eventTypeId string value to fix. - * @returns {string} Fixed version of `eventTypeId`. - */ -export function fixEventTypeId(log: ILogger, eventTypeId: any) { - // return the input eventTypeId if it cannot be fixed - if (!isString(eventTypeId) || eventTypeId.length === 0) { - return eventTypeId; - } - - // replace invalid substrings and truncate - const fixed = eventTypeId - .replace(INVALID_PREFIX_REGEX, '') - .replace(INVALID_SUBSTRING_REGEX, '_'); - const truncated = fixed.slice(0, 80); - if (truncated.length < fixed.length) log.warn(logPrefix + 'EventTypeId was truncated because it cannot be more than 80 characters long.'); - return truncated; -} - -/** - * GaToSplit integration. - * This function provides the SplitTracker plugin to ga command queue. - * - * @param {object} sdkOptions options passed at the SDK integrations settings (isomorphic SDK) or the GoogleAnalyticsToSplit plugin (pluggable browser SDK) - * @param {object} storage SDK storage passed to track events - * @param {object} coreSettings core settings used to define an identity if no one provided as SDK or plugin options - * @param {object} log factory logger - */ -export function GaToSplit(sdkOptions: GoogleAnalyticsToSplitOptions, params: IIntegrationFactoryParams) { - - const { storage, settings: { core: coreSettings, log }, telemetryTracker } = params; - - const defaultOptions = { - prefix: defaultPrefix, - // We set default identities if key and TT are present in settings.core - identities: (coreSettings.key && coreSettings.trafficType) ? - [{ key: coreSettings.key, trafficType: coreSettings.trafficType }] : - undefined - }; - - class SplitTracker { - - private tracker: UniversalAnalytics.Tracker; - - // Constructor for the SplitTracker plugin. - constructor(tracker: UniversalAnalytics.Tracker, pluginOptions: GoogleAnalyticsToSplitOptions) { - - // precedence of options: SDK options (config.integrations) overwrite pluginOptions (`ga('require', 'splitTracker', pluginOptions)`) - const opts = objectAssign({}, defaultOptions, sdkOptions, pluginOptions) as GoogleAnalyticsToSplitOptions & { identities: Identity[] }; - - this.tracker = tracker; - - // Validate identities - const validIdentities = validateIdentities(opts.identities); - - if (validIdentities.length === 0) { - log.warn(logPrefix + 'No valid identities were provided. 
Please check that you are passing a valid list of identities or providing a traffic type at the SDK configuration.'); - return; - } - - const invalids = validIdentities.length - opts.identities.length; - if (invalids) { - log.warn(logPrefix + `${invalids} identities were discarded because they are invalid or duplicated. Identities must be an array of objects with key and trafficType.`); - } - opts.identities = validIdentities; - - // Validate prefix - if (!isString(opts.prefix)) { - log.warn(logPrefix + 'The provided `prefix` was ignored since it is invalid. Please check that you are passing a string object as `prefix`.'); - opts.prefix = undefined; - } - - // Overwrite sendHitTask to perform plugin tasks: - // 1) filter hits - // 2) map hits to Split events - // 3) handle events, i.e., validate and send them to Split BE - const originalSendHitTask = tracker.get('sendHitTask'); - tracker.set('sendHitTask', function (model: UniversalAnalytics.Model) { - originalSendHitTask(model); - - // filter hit if `hits` flag is false or if it comes from Split-to-GA integration - if (opts.hits === false || model.get('splitHit')) return; - try { - if (opts.filter && !opts.filter(model)) return; - } catch (err) { - log.warn(logPrefix + `custom filter threw: ${err}`); - return; - } - - // map hit into an EventData instance - let eventData: SplitIO.EventData = defaultMapper(model); - if (opts.mapper) { - try { - eventData = opts.mapper(model, eventData as SplitIO.EventData); - } catch (err) { - log.warn(logPrefix + `custom mapper threw: ${err}`); - return; - } - if (!eventData) - return; - } - - // Add prefix. Nothing is appended if the prefix is falsy, e.g. undefined or ''. - if (opts.prefix) eventData.eventTypeId = `${opts.prefix}.${eventData.eventTypeId}`; - - eventData.eventTypeId = fixEventTypeId(log, eventData.eventTypeId); - - if (!validateEventData(log, eventData)) - return; - - // Store the event - if (eventData.key && eventData.trafficTypeName) { - storage.events.track(eventData); - } else { // Store the event for each Key-TT pair (identities), if key and TT is not present in eventData - opts.identities.forEach(identity => { - const event = objectAssign({ - key: identity.key, - trafficTypeName: identity.trafficType, - }, eventData); - storage.events.track(event); - }); - } - }); - - log.info(logPrefix + 'integration started'); - } - - } - - // Register the plugin, even if config is invalid, since, if not provided, it will block `ga` command queue. 
- // eslint-disable-next-line no-undef - providePlugin(window, 'splitTracker', SplitTracker, log, sdkOptions.autoRequire === true, telemetryTracker); -} diff --git a/src/integrations/ga/GoogleAnalyticsToSplit.ts b/src/integrations/ga/GoogleAnalyticsToSplit.ts deleted file mode 100644 index b6463bb2..00000000 --- a/src/integrations/ga/GoogleAnalyticsToSplit.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { IIntegrationFactoryParams, IntegrationFactory } from '../types'; -import { GaToSplit } from './GaToSplit'; -import { GoogleAnalyticsToSplitOptions } from './types'; - -export function GoogleAnalyticsToSplit(options: GoogleAnalyticsToSplitOptions = {}): IntegrationFactory { - - // GaToSplit integration factory - function GoogleAnalyticsToSplitFactory(params: IIntegrationFactoryParams) { - return GaToSplit(options, params); - } - - GoogleAnalyticsToSplitFactory.type = 'GOOGLE_ANALYTICS_TO_SPLIT'; - return GoogleAnalyticsToSplitFactory; -} diff --git a/src/integrations/ga/SplitToGa.ts b/src/integrations/ga/SplitToGa.ts deleted file mode 100644 index dd469676..00000000 --- a/src/integrations/ga/SplitToGa.ts +++ /dev/null @@ -1,135 +0,0 @@ -/* eslint-disable no-undef */ -import { uniq } from '../../utils/lang'; -import { SPLIT_IMPRESSION, SPLIT_EVENT } from '../../utils/constants'; -import { SplitIO } from '../../types'; -import { IIntegration } from '../types'; -import { SplitToGoogleAnalyticsOptions } from './types'; -import { ILogger } from '../../logger/types'; - -const logPrefix = 'split-to-ga: '; -const noGaWarning = '`ga` command queue not found.'; -const noHit = 'No hit was sent.'; - -export class SplitToGa implements IIntegration { - - // A falsy object represents the default tracker - static defaultTrackerNames = ['']; - - private trackerNames: string[]; - private filter?: (data: SplitIO.IntegrationData) => boolean; - private mapper?: (data: SplitIO.IntegrationData, defaultMapping: UniversalAnalytics.FieldsObject) => UniversalAnalytics.FieldsObject; - private impressions: boolean | undefined; - private events: boolean | undefined; - private log: ILogger; - - // Default mapper function. - static defaultMapper({ type, payload }: SplitIO.IntegrationData): UniversalAnalytics.FieldsObject { - switch (type) { - case SPLIT_IMPRESSION: - return { - hitType: 'event', - eventCategory: 'split-impression', - eventAction: 'Evaluate ' + (payload as SplitIO.ImpressionData).impression.feature, - eventLabel: 'Treatment: ' + (payload as SplitIO.ImpressionData).impression.treatment + '. Targeting rule: ' + (payload as SplitIO.ImpressionData).impression.label + '.', - nonInteraction: true, - }; - case SPLIT_EVENT: - return { - hitType: 'event', - eventCategory: 'split-event', - eventAction: (payload as SplitIO.EventData).eventTypeId, - eventValue: (payload as SplitIO.EventData).value, - nonInteraction: true, - }; - } - } - - // Util to access ga command queue, accounting for the possibility that it has been renamed. - static getGa(): UniversalAnalytics.ga | undefined { // @ts-expect-error - return typeof window !== 'undefined' ? window[window['GoogleAnalyticsObject'] || 'ga'] : undefined; - } - - /** - * Validates if a given object is a UniversalAnalytics.FieldsObject instance, and logs a warning if not. - * It checks that the object contains a `hitType`, since it is the minimal field required to send the hit - * and avoid the GA error `No hit type specified. Aborting hit.`. 
- * Other validations (e.g., an `event` hitType must have a `eventCategory` and `eventAction`) are handled - * and logged (as warnings or errors depending the case) by GA debugger, but the hit is sent anyway. - * - * @param {object} log factory logger - * @param {UniversalAnalytics.FieldsObject} fieldsObject object to validate. - * @returns {boolean} Whether the data instance is a valid FieldsObject or not. - */ - static validateFieldsObject(log: ILogger, fieldsObject: any): fieldsObject is UniversalAnalytics.FieldsObject { - if (fieldsObject && fieldsObject.hitType) return true; - - log.warn(logPrefix + 'your custom mapper returned an invalid FieldsObject instance. It must be an object with at least a `hitType` field.'); - return false; - } - - /** - * constructor description - * @param {object} options options passed at the SDK integrations settings (isomorphic SDK) or the SplitToGoogleAnalytics plugin (pluggable browser SDK) - */ - constructor(log: ILogger, options: SplitToGoogleAnalyticsOptions) { - - this.trackerNames = SplitToGa.defaultTrackerNames; - this.log = log; - - if (options) { - if (typeof options.filter === 'function') this.filter = options.filter; - if (typeof options.mapper === 'function') this.mapper = options.mapper; - // We strip off duplicated values if we received a `trackerNames` param. - // We don't warn if a tracker does not exist, since the user might create it after the SDK is initialized. - // Note: GA allows to create and get trackers using a string or number as tracker name, and does nothing if other types are used. - if (Array.isArray(options.trackerNames)) this.trackerNames = uniq(options.trackerNames); - - // No need to validate `impressions` and `events` flags. Any other value than `false` is ignored (considered true by default). - this.impressions = options.impressions; - this.events = options.events; - } - - log.info(logPrefix + 'integration started'); - if (typeof SplitToGa.getGa() !== 'function') log.warn(logPrefix + `${noGaWarning} No hits will be sent until it is available.`); - } - - queue(data: SplitIO.IntegrationData) { - // access ga command queue via `getGa` method, accounting for the possibility that - // the global `ga` reference was not yet mutated by analytics.js. - const ga = SplitToGa.getGa(); - if (ga) { - - if (this.impressions === false && data.type === SPLIT_IMPRESSION) return; - if (this.events === false && data.type === SPLIT_EVENT) return; - - let fieldsObject: UniversalAnalytics.FieldsObject & { splitHit?: boolean }; - try { // only try/catch filter and mapper, which might be defined by the user - // filter - if (this.filter && !this.filter(data)) return; - - // map data into a FieldsObject instance - fieldsObject = SplitToGa.defaultMapper(data); - if (this.mapper) { - fieldsObject = this.mapper(data, fieldsObject); - // don't send the hit if it is falsy or invalid - if (!fieldsObject || !SplitToGa.validateFieldsObject(this.log, fieldsObject)) return; - } - } catch (err) { - this.log.warn(logPrefix + `queue method threw: ${err}. ${noHit}`); - return; - } - - // send the hit - this.trackerNames.forEach(trackerName => { - const sendCommand = trackerName ? `${trackerName}.send` : 'send'; - // mark the hit as a Split one to avoid the loop. - fieldsObject.splitHit = true; - // Send to GA using our reference to the GA object. 
- ga(sendCommand, fieldsObject); - }); - } else { - this.log.warn(logPrefix + `${noGaWarning} ${noHit}`); - } - } - -} diff --git a/src/integrations/ga/SplitToGoogleAnalytics.ts b/src/integrations/ga/SplitToGoogleAnalytics.ts deleted file mode 100644 index 101df26f..00000000 --- a/src/integrations/ga/SplitToGoogleAnalytics.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { IIntegrationFactoryParams, IntegrationFactory } from '../types'; -import { SplitToGa } from './SplitToGa'; -import { SplitToGoogleAnalyticsOptions } from './types'; - -export function SplitToGoogleAnalytics(options: SplitToGoogleAnalyticsOptions = {}): IntegrationFactory { - - // SplitToGa integration factory - function SplitToGoogleAnalyticsFactory(params: IIntegrationFactoryParams) { - return new SplitToGa(params.settings.log, options); - } - - SplitToGoogleAnalyticsFactory.type = 'SPLIT_TO_GOOGLE_ANALYTICS'; - return SplitToGoogleAnalyticsFactory; -} diff --git a/src/integrations/ga/__tests__/GaToSplit.spec.ts b/src/integrations/ga/__tests__/GaToSplit.spec.ts deleted file mode 100644 index 1417c6a1..00000000 --- a/src/integrations/ga/__tests__/GaToSplit.spec.ts +++ /dev/null @@ -1,295 +0,0 @@ -/* eslint-disable no-undef */ -import { IEventsCacheSync } from '../../../storages/types'; -import { SplitIO, ISettings } from '../../../types'; -import { GaToSplit, validateIdentities, defaultPrefix, defaultMapper, validateEventData, fixEventTypeId } from '../GaToSplit'; -import { gaMock, gaRemove, modelMock } from './gaMock'; -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; - -const hitSample: UniversalAnalytics.FieldsObject = { - hitType: 'pageview', - page: '/path', -}; -const modelSample = modelMock(hitSample); -const expectedDefaultEvent = { - eventTypeId: 'pageview', - value: undefined, - properties: { page: hitSample.page }, - timestamp: 0, -}; - -test('validateIdentities', () => { - expect(validateIdentities(undefined)).toEqual([]); // @ts-expect-error - expect(validateIdentities(null)).toEqual([]); // @ts-expect-error - expect(validateIdentities(123)).toEqual([]); // @ts-expect-error - expect(validateIdentities(true)).toEqual([]); // @ts-expect-error - expect(validateIdentities('something')).toEqual([]); // @ts-expect-error - expect(validateIdentities({})).toEqual([]); // @ts-expect-error - expect(validateIdentities(/asd/ig)).toEqual([]); // @ts-expect-error - expect(validateIdentities(function () { })).toEqual([]); - - expect(validateIdentities([])).toEqual([]); // @ts-expect-error - expect(validateIdentities([undefined, /asd/ig, function () { }])).toEqual([]); - expect(validateIdentities([{ - key: 'key', trafficType: 'user' // First occurence of this item - }, { // @ts-expect-error - key: 'key', trafficType: function () { } // Invalid item (invalid TT) - }, { - key: 'keyu', trafficType: 'ser' // First occurence of this item - }, { // @ts-expect-error - key: true, trafficType: 'user' // Invalid item (invalid key) - }, { - key: 'key2', trafficType: 'user2' // First occurence of this item - }, { // @ts-expect-error - key: 12, trafficType: 'user' // First occurence of this item - }, { - key: 'key', trafficType: 'user' // Duplicated item - }, // @ts-expect-error - {} // Invalid item (undefined key and traffic type) - ])).toEqual([{ - key: 'key', trafficType: 'user' - }, { - key: 'keyu', trafficType: 'ser' - }, { - key: 'key2', trafficType: 'user2' - }, { - key: 12, trafficType: 'user' - }]); -}); - -test('validateEventData', () => { - expect(() => { validateEventData(loggerMock, undefined); }).toThrow(); 
// throws exception if passed object is undefined - expect(() => { validateEventData(loggerMock, null); }).toThrow(); // throws exception if passed object is null - - expect(validateEventData(loggerMock, {})).toBe(false); // event must have a valid eventTypeId - expect(validateEventData(loggerMock, { eventTypeId: 'type' })).toBe(true); // event must have a valid eventTypeId - expect(validateEventData(loggerMock, { eventTypeId: 123 })).toBe(false); // event must have a valid eventTypeId - - expect(validateEventData(loggerMock, { eventTypeId: 'type', value: 'value' })).toBe(false); // event must have a valid value if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', value: 0 })).toBe(true); // event must have a valid value if present - - expect(validateEventData(loggerMock, { eventTypeId: 'type', properties: ['prop1'] })).toBe(false); // event must have valid properties if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', properties: { prop1: 'prop1' } })).toBe(true); // event must have valid properties if present - - expect(validateEventData(loggerMock, { eventTypeId: 'type', timestamp: true })).toBe(false); // event must have a valid timestamp if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', timestamp: Date.now() })).toBe(true); // event must have a valid timestamp if present - - expect(validateEventData(loggerMock, { eventTypeId: 'type', key: true })).toBe(false); // event must have a valid key if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', key: 'key' })).toBe(true); // event must have a valid key if present - - expect(validateEventData(loggerMock, { eventTypeId: 'type', trafficTypeName: true })).toBe(false); // event must have a valid trafficTypeName if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', trafficTypeName: 'tt' })).toBe(true); // event must have a valid trafficTypeName if present -}); - -test('fixEventTypeId', () => { - expect(fixEventTypeId(loggerMock, undefined)).toBe(undefined); - expect(fixEventTypeId(loggerMock, 111)).toBe(111); - expect(fixEventTypeId(loggerMock, '')).toBe(''); - expect(fixEventTypeId(loggerMock, '()')).toBe(''); - expect(fixEventTypeId(loggerMock, '()+_')).toBe(''); - expect(fixEventTypeId(loggerMock, ' some event ')).toBe('some_event_'); - expect(fixEventTypeId(loggerMock, ' -*- some -.%^ event =+ ')).toBe('some_-._event_'); -}); - -test('defaultMapper', () => { - const initTimestamp = Date.now(); - const defaultEvent = defaultMapper(modelSample); - - expect(defaultEvent.eventTypeId).toBe(expectedDefaultEvent.eventTypeId); // should return the corresponding default event instance for a given pageview hit - expect(defaultEvent.value).toBe(expectedDefaultEvent.value); - expect(defaultEvent.properties).toEqual(expectedDefaultEvent.properties); - expect(initTimestamp <= defaultEvent.timestamp && defaultEvent.timestamp <= Date.now()).toBe(true); -}); - -const coreSettings = { - authorizationKey: 'sdkkey', - key: 'key', - trafficType: 'user', -} as ISettings['core']; -const fakeStorage = { - // @ts-expect-error - events: { - track: jest.fn() - } as IEventsCacheSync -}; -const fakeParams = { - storage: fakeStorage, - settings: { core: coreSettings, log: loggerMock } -}; - -// Returns a new event by copying defaultEvent -function customMapper(model: UniversalAnalytics.Model, defaultEvent: SplitIO.EventData) { - return { ...defaultEvent, properties: { ...defaultEvent.properties, someProp: 'someProp' } }; -} -// Updates defaultEvent -function 
customMapper2(model: UniversalAnalytics.Model, defaultEvent: SplitIO.EventData) { - // @ts-ignore. The defaultEvent has a property value, that might be empty depending on the hitType - defaultEvent.properties['someProp2'] = 'someProp2'; - return defaultEvent; -} -// Updates defaultEvent adding a `key` and `TT`, to assert that `identities` plugin param is ignored. -function customMapper3(model: UniversalAnalytics.Model, defaultEvent: SplitIO.EventData) { - defaultEvent.key = 'someKey'; - defaultEvent.trafficTypeName = 'someTT'; - return defaultEvent; -} -function customFilter() { - return true; -} -const customIdentities = [{ key: 'key2', trafficType: 'tt2' }]; - -test('GaToSplit', () => { - - // test setup - const { ga, tracker } = gaMock(); - - // provide SplitTracker plugin - GaToSplit({}, fakeParams as any); - // @ts-expect-error - let [arg1, arg2, SplitTracker] = ga.mock.calls.pop() as [string, string, any]; - expect([arg1, arg2]).toEqual(['provide', 'splitTracker']); - expect(typeof SplitTracker === 'function').toBe(true); - - /** Default behavior */ - - // init plugin on default tracker. equivalent to calling `ga('require', 'splitTracker')` - new SplitTracker(tracker); - - // send hit and assert that it was properly tracked as a Split event - window.ga('send', hitSample); - let event = (fakeStorage.events.track as jest.Mock).mock.calls.pop()[0]; - expect(event).toEqual( - { - ...expectedDefaultEvent, - eventTypeId: defaultPrefix + '.' + expectedDefaultEvent.eventTypeId, - key: coreSettings.key, - trafficTypeName: coreSettings.trafficType, - timestamp: event.timestamp, - }); // should track an event using the default mapper and key and traffic type from the SDK config - - /** Custom behavior: plugin options */ - - // init plugin with custom options - new SplitTracker(tracker, { mapper: customMapper, filter: customFilter, identities: customIdentities, prefix: '' }); - - // send hit and assert that it was properly tracked as a Split event - window.ga('send', hitSample); - event = (fakeStorage.events.track as jest.Mock).mock.calls.pop()[0]; - expect(event).toEqual( - { - ...customMapper(modelSample, defaultMapper(modelSample)), - key: customIdentities[0].key, - trafficTypeName: customIdentities[0].trafficType, - timestamp: event.timestamp, - }); // should track an event using a custom mapper and identity from the plugin options - - /** Custom behavior: SDK options */ - - // provide a new SplitTracker plugin with custom SDK options - GaToSplit({ - mapper: customMapper2, filter: customFilter, identities: customIdentities, prefix: '' - }, fakeParams as any); - // @ts-expect-error - [arg1, arg2, SplitTracker] = ga.mock.calls.pop(); - expect([arg1, arg2]).toEqual(['provide', 'splitTracker']); - expect(typeof SplitTracker === 'function').toBe(true); - - // init plugin - new SplitTracker(tracker); - - // send hit and assert that it was properly tracked as a Split event - window.ga('send', hitSample); - event = (fakeStorage.events.track as jest.Mock).mock.calls.pop()[0]; - expect(event).toEqual( - { - ...customMapper2(modelSample, defaultMapper(modelSample)), - key: customIdentities[0].key, - trafficTypeName: customIdentities[0].trafficType, - timestamp: event.timestamp, - }); // should track the event using a custom mapper and identity from the SDK options - - /** Custom behavior: SDK options, including a customMapper that set events key and traffic type */ - - // provide a new SplitTracker plugin with custom SDK options - GaToSplit({ - mapper: customMapper3, filter: customFilter, identities: 
customIdentities, prefix: '' - }, fakeParams as any); - // @ts-ignore - [arg1, arg2, SplitTracker] = ga.mock.calls.pop(); - expect([arg1, arg2]).toEqual(['provide', 'splitTracker']); - expect(typeof SplitTracker === 'function').toBe(true); - - // init plugin - new SplitTracker(tracker); - - // send hit and assert that it was properly tracked as a Split event - window.ga('send', hitSample); - event = (fakeStorage.events.track as jest.Mock).mock.calls.pop()[0]; - expect(event).toEqual( - { - ...customMapper3(modelSample, defaultMapper(modelSample)), - timestamp: event.timestamp, - }); // should track the event using a custom mapper and identity from the SDK options - - // test teardown - gaRemove(); -}); - -test('GaToSplit: `hits` flag param', () => { - - // test setup - const { ga, tracker } = gaMock(); - GaToSplit({}, fakeParams as any); // @ts-expect-error - let SplitTracker: any = ga.mock.calls.pop()[2]; - - // init plugin with custom options - new SplitTracker(tracker, { hits: false }); - - // send hit and assert that it was not tracked as a Split event - (fakeStorage.events.track as jest.Mock).mockClear(); - window.ga('send', hitSample); - expect(fakeStorage.events.track).toBeCalledTimes(0); - - // test teardown - gaRemove(); -}); - -test('GaToSplit: `autoRequire` script and flag param', () => { - // test setup - gaMock(); - loggerMock.error.mockClear(); - - // Create commands before autoRequire script is executed - window.ga('create', 'UA-ID-X', 'auto', 'tX'); - - GaToSplit({ autoRequire: true }, fakeParams as any); - expect(loggerMock.error).toBeCalledTimes(1); - - window.ga('create', 'UA-ID-Y', 'auto', 'tY'); - - // Run autoRequire iife - require('../autoRequire.js'); - - GaToSplit({ autoRequire: true }, fakeParams as any); - expect(loggerMock.error).toBeCalledTimes(1); - - // Assert auto-require script - window.ga('create', 'UA-ID-0'); - window.ga('create', 'UA-ID-1', 'auto', 't1'); - window.ga('create', 'UA-ID-2', { name: 't2' }); - window.ga('create', 'UA-ID-3', 'auto', { name: 't3' }); - window.ga('create', { trackingId: 'UA-ID-4', name: 't4' }); - - expect(window.ga.q.map(args => args[0])).toEqual([ - 'create' /* tX */, 'provide', - 'create' /* tY */, 'tX.require', - 'tY.require', 'provide', - 'create' /* default */, 'require', - 'create' /* t1 */, 't1.require', - 'create' /* t2 */, 't2.require', - 'create' /* t3 */, 't3.require', - 'create' /* t4 */, 't4.require', - ]); - - // test teardown - gaRemove(); -}); diff --git a/src/integrations/ga/__tests__/SplitToGa.spec.ts b/src/integrations/ga/__tests__/SplitToGa.spec.ts deleted file mode 100644 index d05e4bab..00000000 --- a/src/integrations/ga/__tests__/SplitToGa.spec.ts +++ /dev/null @@ -1,195 +0,0 @@ -import { SplitIO } from '../../../types'; -import { SPLIT_IMPRESSION, SPLIT_EVENT } from '../../../utils/constants'; - -// Mocks -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { gaMock, gaRemove } from './gaMock'; - -// Test target -import { SplitToGa } from '../SplitToGa'; - -const fakeImpressionPayload: SplitIO.ImpressionData = { - impression: { - feature: 'hierarchical_splits_test', - keyName: 'nicolas@split.io', - treatment: 'on', - bucketingKey: undefined, - label: 'expected label', - time: 2000, - changeNumber: 1000, - }, - attributes: undefined, - ip: 'ip', - hostname: 'hostname', - sdkLanguageVersion: 'version', -}; -const fakeImpression: SplitIO.IntegrationData = { - type: SPLIT_IMPRESSION, - payload: fakeImpressionPayload, -}; -const defaultImpressionFieldsObject: 
UniversalAnalytics.FieldsObject = { - hitType: 'event', - eventCategory: 'split-impression', - eventAction: 'Evaluate ' + fakeImpressionPayload.impression.feature, - eventLabel: 'Treatment: ' + fakeImpressionPayload.impression.treatment + '. Targeting rule: ' + fakeImpressionPayload.impression.label + '.', - nonInteraction: true -}; - -const fakeEventPayload: SplitIO.EventData = { - eventTypeId: 'eventTypeId', - trafficTypeName: 'trafficTypeName', - value: 0, - timestamp: Date.now(), - key: 'key', - properties: {}, -}; -const fakeEvent: SplitIO.IntegrationData = { - type: SPLIT_EVENT, - payload: fakeEventPayload, -}; -const defaultEventFieldsObject = { - hitType: 'event', - eventCategory: 'split-event', - eventAction: fakeEventPayload.eventTypeId, - eventValue: fakeEventPayload.value, - nonInteraction: true -}; - -describe('SplitToGa', () => { - - test('SplitToGa.validateFieldsObject', () => { - expect(SplitToGa.validateFieldsObject(loggerMock, undefined)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, null)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, 123)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, true)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, 'something')).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, /asd/ig)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, function () { })).toBe(false); - - expect(SplitToGa.validateFieldsObject(loggerMock, {})).toBe(false); // An empty object is an invalid FieldsObject instance - expect(SplitToGa.validateFieldsObject(loggerMock, { hitType: 10 })).toBe(true); // A fields object instance must have a HitType - expect(SplitToGa.validateFieldsObject(loggerMock, { hitType: 'event', ignoredProp: 'ignoredProp' })).toBe(true); // A fields object instance must have a HitType - }); - - test('SplitToGa.defaultMapper', () => { - // should return the corresponding FieldsObject for a given impression - expect(SplitToGa.defaultMapper(fakeImpression)).toEqual(defaultImpressionFieldsObject); - // should return the corresponding FieldsObject for a given event - expect(SplitToGa.defaultMapper(fakeEvent)).toEqual(defaultEventFieldsObject); - }); - - test('SplitToGa.getGa', () => { - loggerMock.mockClear(); - - const { ga } = gaMock(); - expect(SplitToGa.getGa()).toBe(ga); // should return ga command queue if it exists - - let integration = new SplitToGa(loggerMock, {}); - expect(typeof integration).toBe('object'); - expect(loggerMock.warn).not.toBeCalled(); - - gaRemove(); - expect(SplitToGa.getGa()).toBe(undefined); // should return undefined if ga command queue does not exist - - integration = new SplitToGa(loggerMock, {}); - expect(typeof integration).toBe('object'); // SplitToGa instances should be created even if ga command queue does not exist - // @ts-expect-error - integration.queue('fake-data'); - expect(loggerMock.warn.mock.calls).toEqual([ // Warn when creating and queueing while ga command queue does not exist - ['split-to-ga: `ga` command queue not found. No hits will be sent until it is available.'], - ['split-to-ga: `ga` command queue not found. 
No hit was sent.'] - ]); - }); - - test('SplitToGa (constructor and queue method)', () => { - - // test setup - const { ga } = gaMock(); - - /** Default behaviour **/ - const instance = new SplitToGa(loggerMock, {}) as SplitToGa; - instance.queue(fakeImpression); - // should queue `ga send` with the default mapped FieldsObject for impressions, appended with `splitHit` field - expect(ga).lastCalledWith('send', { ...defaultImpressionFieldsObject, splitHit: true }); - - instance.queue(fakeEvent); - // should queue `ga send` with the default mapped FieldsObject for events, appended with `splitHit` field - expect(ga).lastCalledWith('send', { ...defaultEventFieldsObject, splitHit: true }); - - expect(ga).toBeCalledTimes(2); - - /** Custom behaviour **/ - // Custom filter - function customFilter(data: SplitIO.IntegrationData) { - return data.type === SPLIT_EVENT; - } - // Custom mapper that returns a new FieldsObject instance - function customMapper() { - return { - hitType: 'event', - someField: 'someField', - } as UniversalAnalytics.FieldsObject; - } - const trackerNames = ['', 'namedTracker']; - const instance2 = new SplitToGa(loggerMock, { - filter: customFilter, - mapper: customMapper, - trackerNames, - }) as SplitToGa; - ga.mockClear(); - instance2.queue(fakeImpression); - expect(ga).not.toBeCalled(); // shouldn't queue `ga send` if a Split data (impression or event) is filtered - - instance2.queue(fakeEvent); - expect(ga.mock.calls).toEqual([ - ['send', { ...customMapper(), splitHit: true }], - [`${trackerNames[1]}.send`, { ...customMapper(), splitHit: true }] - ]); // should queue `ga send` with the custom trackerName and FieldsObject from customMapper, appended with `splitHit` field - - expect(ga).toBeCalledTimes(2); - - // Custom mapper that returns the default FieldsObject - function customMapper2(data: SplitIO.IntegrationData, defaultFieldsObject: UniversalAnalytics.FieldsObject) { - return defaultFieldsObject; - } - const instance3 = new SplitToGa(loggerMock, { - mapper: customMapper2, - }) as SplitToGa; - ga.mockClear(); - instance3.queue(fakeImpression); - // should queue `ga send` with the custom FieldsObject from customMapper2, appended with `splitHit` field - expect(ga).lastCalledWith('send', { ...customMapper2(fakeImpression, defaultImpressionFieldsObject), splitHit: true }); - - expect(ga).toBeCalledTimes(1); - - // Custom mapper that throws an error - function customMapper3() { - throw 'some error'; - } - const instance4 = new SplitToGa(loggerMock, { // @ts-expect-error - mapper: customMapper3, - }) as SplitToGa; - ga.mockClear(); - instance4.queue(fakeImpression); - expect(ga).not.toBeCalled(); // shouldn't queue `ga send` if a custom mapper throw an exception - - // `impressions` flags - const instance5 = new SplitToGa(loggerMock, { - impressions: false, - }) as SplitToGa; - ga.mockClear(); - instance5.queue(fakeImpression); - expect(ga).not.toBeCalled(); // shouldn't queue `ga send` for an impression if `impressions` flag is false - - // `impressions` flags - const instance6 = new SplitToGa(loggerMock, { - events: false, - }) as SplitToGa; - ga.mockClear(); - instance6.queue(fakeEvent); - expect(ga).not.toBeCalled(); // shouldn't queue `ga send` for a event if `events` flag is false - - // test teardown - gaRemove(); - }); -}); diff --git a/src/integrations/ga/__tests__/gaMock.ts b/src/integrations/ga/__tests__/gaMock.ts deleted file mode 100644 index 2a72863d..00000000 --- a/src/integrations/ga/__tests__/gaMock.ts +++ /dev/null @@ -1,60 +0,0 @@ -export function 
modelMock(fieldsObject: UniversalAnalytics.FieldsObject) { - return { - get(fieldName: string) { - return fieldsObject[fieldName as keyof UniversalAnalytics.FieldsObject]; - }, - set(fieldNameOrObject: string | {}, fieldValue?: any) { - if (typeof fieldNameOrObject === 'object') - fieldsObject = { ...fieldsObject, ...fieldNameOrObject }; - else - fieldsObject[fieldNameOrObject as keyof UniversalAnalytics.FieldsObject] = fieldValue; - } - }; -} - -export function gaMock() { - - const __originalSendHitTask = jest.fn(); - const __tasks: Record = { - sendHitTask: __originalSendHitTask - }; - const ga = jest.fn(function (command) { // @ts-ignore - (ga.q = ga.q || []).push(arguments); - - if (command === 'send') { - const fieldsObject = arguments[1]; - __tasks.sendHitTask(modelMock(fieldsObject)); - } - }); - - const set = jest.fn(function (taskName, taskFunc) { - __tasks[taskName] = taskFunc; - }); - const get = jest.fn(function (taskName) { - return __tasks[taskName]; - }); - - // Add ga to window object - if (typeof window === 'undefined') { // @ts-expect-error - if (global) global.window = {}; - } // @ts-expect-error - // eslint-disable-next-line no-undef - window['GoogleAnalyticsObject'] = 'ga'; - // eslint-disable-next-line no-undef - window['ga'] = window['ga'] || ga; - - return { - ga, - tracker: { - get, - set, - __originalSendHitTask, - } - }; -} - -export function gaRemove() { - if (typeof window !== 'undefined') // @ts-expect-error - // eslint-disable-next-line no-undef - window[window['GoogleAnalyticsObject'] || 'ga'] = undefined; -} diff --git a/src/integrations/ga/autoRequire.js b/src/integrations/ga/autoRequire.js deleted file mode 100644 index a6adad72..00000000 --- a/src/integrations/ga/autoRequire.js +++ /dev/null @@ -1,33 +0,0 @@ -/* eslint-disable no-undef */ -/** - * Auto-require script to use with GoogleAnalyticsToSplit integration - */ -(function (w, g, o) { - w[o] = w[o] || g; - w[g] = w[g] || function () { w[g].q.push(arguments); }; - w[g].q = w[g].q || []; - - var trackerNames = {}; - function name(arg) { return typeof arg === 'object' && typeof arg.name === 'string' && arg.name; } - - function processCommand(command) { // Queue a `require` command if v is a `create` command - if (command && command[0] === 'create') { - var trackerName = name(command[1]) || name(command[2]) || name(command[3]) || (typeof command[3] === 'string' ? command[3] : undefined); // Get tracker name - - if (!trackerNames[trackerName]) { - trackerNames[trackerName] = true; - w[g]((trackerName ? trackerName + '.' : '') + 'require', 'splitTracker'); // Auto-require - } - } - } - - w[g].q.forEach(processCommand); // Process already queued commands - - var originalPush = w[g].q.push; - w[g].q.push = function (command) { // Spy new queued commands - var result = originalPush.apply(this, arguments); - processCommand(command); - return result; - }; - -})(window, 'ga', 'GoogleAnalyticsObject'); diff --git a/src/integrations/ga/types.ts b/src/integrations/ga/types.ts deleted file mode 100644 index dfa5f11e..00000000 --- a/src/integrations/ga/types.ts +++ /dev/null @@ -1,153 +0,0 @@ -import { SplitIO } from '../../types'; - -/** - * A pair of user key and it's trafficType, required for tracking valid Split events. - * @typedef {Object} Identity - * @property {string} key The user key. - * @property {string} trafficType The key traffic type. 
- */ -export type Identity = { - key: string; - trafficType: string; -}; - -/** - * Options for GoogleAnalyticsToSplit integration plugin - */ -export interface GoogleAnalyticsToSplitOptions { - /** - * Optional flag to filter GA hits from being tracked as Split events. - * @property {boolean} hits - * @default true - */ - hits?: boolean, - /** - * Optional predicate used to define a custom filter for tracking GA hits as Split events. - * For example, the following filter allows to track only 'event' hits: - * `(model) => model.get('hitType') === 'event'` - * By default, all hits are tracked as Split events. - */ - filter?: (model: UniversalAnalytics.Model) => boolean, - /** - * Optional function useful when you need to modify the Split event before tracking it. - * This function is invoked with two arguments: - * 1. the GA model object representing the hit. - * 2. the default format of the mapped Split event instance. - * The return value must be a Split event, that can be the second argument or a new object. - * - * For example, the following mapper adds a custom property to events: - * `(model, defaultMapping) => { - * defaultMapping.properties.someProperty = SOME_VALUE; - * return defaultMapping; - * }` - */ - mapper?: (model: UniversalAnalytics.Model, defaultMapping: SplitIO.EventData) => SplitIO.EventData, - /** - * Optional prefix for EventTypeId, to prevent any kind of data collision between events. - * @property {string} prefix - * @default 'ga' - */ - prefix?: string, - /** - * List of Split identities (key & traffic type pairs) used to track events. - * If not provided, events are sent using the key and traffic type provided at SDK config - */ - identities?: Identity[], - /** - * Optional flag to log an error if the `auto-require` script is not detected. - * The auto-require script automatically requires the `splitTracker` plugin for created trackers, - * and should be placed right after your Google Analytics, Google Tag Manager or gtag.js script tag. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#set-up-with-gtm-and-gtag.js} - * - * @property {boolean} autoRequire - * @default false - */ - autoRequire?: boolean, -} - -/** - * Enable 'Google Analytics to Split' integration, to track Google Analytics hits as Split events. - * Used by the browser variant of the isomorphic JS SDK. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#google-analytics-to-split} - */ -export interface IGoogleAnalyticsToSplitConfig extends GoogleAnalyticsToSplitOptions { - type: 'GOOGLE_ANALYTICS_TO_SPLIT' -} - -/** - * Options for SplitToGoogleAnalytics integration plugin - */ -export interface SplitToGoogleAnalyticsOptions { - /** - * Optional flag to filter Split impressions from being tracked as GA hits. - * @property {boolean} impressions - * @default true - */ - impressions?: boolean, - /** - * Optional flag to filter Split events from being tracked as GA hits. - * @property {boolean} events - * @default true - */ - events?: boolean, - /** - * Optional predicate used to define a custom filter for tracking Split data (events and impressions) as GA hits. - * For example, the following filter allows to track only impressions, equivalent to setting events to false: - * `(data) => data.type === 'IMPRESSION'` - */ - filter?: (data: SplitIO.IntegrationData) => boolean, - /** - * Optional function useful when you need to modify the GA hit before sending it. - * This function is invoked with two arguments: - * 1. the input data (Split event or impression). 
- * 2. the default format of the mapped FieldsObject instance (GA hit). - * The return value must be a FieldsObject, that can be the second argument or a new object. - * - * For example, the following mapper adds a custom dimension to hits: - * `(data, defaultMapping) => { - * defaultMapping.dimension1 = SOME_VALUE; - * return defaultMapping; - * }` - * - * Default FieldsObject instance for data.type === 'IMPRESSION': - * `{ - * hitType: 'event', - * eventCategory: 'split-impression', - * eventAction: 'Evaluate ' + data.payload.impression.feature, - * eventLabel: 'Treatment: ' + data.payload.impression.treatment + '. Targeting rule: ' + data.payload.impression.label + '.', - * nonInteraction: true, - * }` - * Default FieldsObject instance for data.type === 'EVENT': - * `{ - * hitType: 'event', - * eventCategory: 'split-event', - * eventAction: data.payload.eventTypeId, - * eventValue: data.payload.value, - * nonInteraction: true, - * }` - */ - mapper?: (data: SplitIO.IntegrationData, defaultMapping: UniversalAnalytics.FieldsObject) => UniversalAnalytics.FieldsObject, - /** - * List of tracker names to send the hit. An empty string represents the default tracker. - * If not provided, hits are only sent to default tracker. - */ - trackerNames?: string[], -} - -/** - * Enable 'Split to Google Analytics' integration, to track Split impressions and events as Google Analytics hits. - * Used by the browser variant of the isomorphic JS SDK. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#split-to-google-analytics} - */ -export interface ISplitToGoogleAnalyticsConfig extends SplitToGoogleAnalyticsOptions { - type: 'SPLIT_TO_GOOGLE_ANALYTICS' -} - -/** - * Available integration options for the browser - * Used by the browser variant of the isomorphic JS SDK. 
- */ -export type BrowserIntegration = ISplitToGoogleAnalyticsConfig | IGoogleAnalyticsToSplitConfig; diff --git a/src/utils/constants/browser.ts b/src/utils/constants/browser.ts index ec2add2c..d627f780 100644 --- a/src/utils/constants/browser.ts +++ b/src/utils/constants/browser.ts @@ -1,6 +1,2 @@ -// Integration types -export const GOOGLE_ANALYTICS_TO_SPLIT = 'GOOGLE_ANALYTICS_TO_SPLIT'; -export const SPLIT_TO_GOOGLE_ANALYTICS = 'SPLIT_TO_GOOGLE_ANALYTICS'; - // This value might be eventually set via a config parameter export const DEFAULT_CACHE_EXPIRATION_IN_MILLIS = 864000000; // 10 days diff --git a/src/utils/settingsValidation/integrations/__tests__/plugable.spec.ts b/src/utils/settingsValidation/integrations/__tests__/plugable.spec.ts index a9b76aa1..3de62641 100644 --- a/src/utils/settingsValidation/integrations/__tests__/plugable.spec.ts +++ b/src/utils/settingsValidation/integrations/__tests__/plugable.spec.ts @@ -16,8 +16,8 @@ describe('integrations validator for pluggable integrations', () => { }); test('Filters invalid integration factories from `integrations` array', () => { - const validNoopIntFactory = () => { }; // no-op integration, such as GoogleAnalyticsToSplit - const validIntFactory = () => { return { queue() { } }; }; // integration with queue handler, such as SplitToGoogleAnalytics + const validNoopIntFactory = () => { }; // integration with no queue handler, such as 3rdPartyAnalyticsToSplit + const validIntFactory = () => { return { queue() { } }; }; // integration with queue handler, such as SplitTo3rdPartyAnalytics const invalid = { queue() { } }; // Integration factories that are invalid objects are removed From bb9a458a47cd3c4f9be1dad2a53f5f3c4ff82a33 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 7 Oct 2024 18:45:35 -0300 Subject: [PATCH 099/146] Update objectAssign polyfill to reduce code size --- src/utils/lang/objectAssign.ts | 105 ++++----------------------------- 1 file changed, 13 insertions(+), 92 deletions(-) diff --git a/src/utils/lang/objectAssign.ts b/src/utils/lang/objectAssign.ts index 23fcbd9a..1253b355 100644 --- a/src/utils/lang/objectAssign.ts +++ b/src/utils/lang/objectAssign.ts @@ -1,71 +1,6 @@ -/* -Adaptation of "object-assign" library (https://www.npmjs.com/package/object-assign) -exported as an ES module instead of CommonJS, to avoid extra configuration steps when using -the ESM build of the SDK with tools that doesn't support CommonJS by default (e.g. Rollup). - -object-assign -(c) Sindre Sorhus -@license MIT -*/ - -/* eslint-disable */ -// @ts-nocheck - -var getOwnPropertySymbols = Object.getOwnPropertySymbols; -var hasOwnProperty = Object.prototype.hasOwnProperty; -var propIsEnumerable = Object.prototype.propertyIsEnumerable; - -function toObject(val) { - if (val === null || val === undefined) { - throw new TypeError('Object.assign cannot be called with null or undefined'); - } - - return Object(val); -} - -function shouldUseNative() { - try { - if (!Object.assign) { - return false; - } - - // Detect buggy property enumeration order in older V8 versions. 
- - // https://bugs.chromium.org/p/v8/issues/detail?id=4118 - var test1 = new String('abc'); - test1[5] = 'de'; - if (Object.getOwnPropertyNames(test1)[0] === '5') { - return false; - } - - // https://bugs.chromium.org/p/v8/issues/detail?id=3056 - var test2 = {}; - for (var i = 0; i < 10; i++) { - test2['_' + String.fromCharCode(i)] = i; - } - var order2 = Object.getOwnPropertyNames(test2).map(function (n) { - return test2[n]; - }); - if (order2.join('') !== '0123456789') { - return false; - } - - // https://bugs.chromium.org/p/v8/issues/detail?id=3056 - var test3 = {}; - 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { - test3[letter] = letter; - }); - if (Object.keys(Object.assign({}, test3)).join('') !== - 'abcdefghijklmnopqrst') { - return false; - } - - return true; - } catch (err) { - // We don't expect any of the above to throw, but better to be safe. - return false; - } -} +/** + * A tiny polyfill for Object.assign + */ // https://www.npmjs.com/package/@types/object-assign type ObjectAssign = ((target: T, source: U) => T & U) & @@ -74,31 +9,17 @@ type ObjectAssign = ((target: T, source: U) => T & U) & ((target: T, source1: U, source2: V, source3: W, source4: Q) => T & U & V & W & Q) & ((target: T, source1: U, source2: V, source3: W, source4: Q, source5: R) => T & U & V & W & Q & R) & ((target: any, ...sources: any[]) => any); - -export const objectAssign: ObjectAssign = shouldUseNative() ? Object.assign : function (target, source) { - var from; - var to = toObject(target); - var symbols; - - for (var s = 1; s < arguments.length; s++) { - from = Object(arguments[s]); - - // eslint-disable-next-line no-restricted-syntax - for (var key in from) { - if (hasOwnProperty.call(from, key)) { - to[key] = from[key]; - } - } - - if (getOwnPropertySymbols) { - symbols = getOwnPropertySymbols(from); - for (var i = 0; i < symbols.length; i++) { - if (propIsEnumerable.call(from, symbols[i])) { - to[symbols[i]] = from[symbols[i]]; - } +export const objectAssign: ObjectAssign = Object.assign || function (target: any) { + if (target === null || target === undefined) throw new TypeError('Object.assign cannot be called with null or undefined'); + target = Object(target); + + for (let i = 1; i < arguments.length; i++) { + const source = Object(arguments[i]); // eslint-disable-next-line no-restricted-syntax + for (const key in source) { + if (Object.prototype.hasOwnProperty.call(source, key)) { + target[key] = source[key]; } } } - - return to; + return target; }; From 9d97256cb33868f449761b3c7b1daf75dcb51fce Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 7 Oct 2024 19:08:24 -0300 Subject: [PATCH 100/146] Update compat linter: 'defaults, ie 10, node 6' -> 'defaults, node 14' --- .eslintrc | 2 +- package-lock.json | 194 +++++++++++++++++++++++++--------------------- package.json | 2 +- 3 files changed, 106 insertions(+), 92 deletions(-) diff --git a/.eslintrc b/.eslintrc index 365fc121..0442d7a9 100644 --- a/.eslintrc +++ b/.eslintrc @@ -80,7 +80,7 @@ "message": "Don't declare const enum, because it is not supported by Babel used for building RN SDK" } ], - "compat/compat": ["error", "defaults, ie 10, node 6"], + "compat/compat": ["error", "defaults, node 14"], "no-throw-literal": "error", "import/no-default-export": "error", "import/no-self-import": "error" diff --git a/package-lock.json b/package-lock.json index 8fc190d7..2493ec38 100644 --- a/package-lock.json +++ b/package-lock.json @@ -20,7 +20,7 @@ "@typescript-eslint/parser": "^6.6.0", "cross-env": "^7.0.2", "eslint": 
"^8.48.0", - "eslint-plugin-compat": "^4.2.0", + "eslint-plugin-compat": "^6.0.1", "eslint-plugin-import": "^2.25.3", "fetch-mock": "^9.11.0", "ioredis": "^4.28.0", @@ -1363,9 +1363,9 @@ "dev": true }, "node_modules/@mdn/browser-compat-data": { - "version": "5.3.14", - "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.3.14.tgz", - "integrity": "sha512-Y9XQrphVcE6u9xMm+gIqN86opbU/5s2W1pdPyKRyFV5B7+2jWM2gLI5JpfhZncaoDKvhy6FYwK04aCz5UM/bTQ==", + "version": "5.6.4", + "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.6.4.tgz", + "integrity": "sha512-bOOF4GGzn0exmvNHpSWmTfOXB9beTpIFCm2KPY2UVoCdn1YVfr8heuHr1C++BYI9Tun8REgi5TNVdKbBs249CA==", "dev": true }, "node_modules/@nodelib/fs.scandir": { @@ -2228,9 +2228,9 @@ "dev": true }, "node_modules/browserslist": { - "version": "4.21.10", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz", - "integrity": "sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ==", + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz", + "integrity": "sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==", "dev": true, "funding": [ { @@ -2247,10 +2247,10 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001517", - "electron-to-chromium": "^1.4.477", - "node-releases": "^2.0.13", - "update-browserslist-db": "^1.0.11" + "caniuse-lite": "^1.0.30001663", + "electron-to-chromium": "^1.5.28", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" }, "bin": { "browserslist": "cli.js" @@ -2318,9 +2318,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001528", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001528.tgz", - "integrity": "sha512-0Db4yyjR9QMNlsxh+kKWzQtkyflkG/snYheSzkjmvdEtEXB1+jt7A2HmSEiO6XIJPIbo92lHNGNySvE5pZcs5Q==", + "version": "1.0.30001667", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001667.tgz", + "integrity": "sha512-7LTwJjcRkzKFmtqGsibMeuXmvFDfZq/nzIjnmgCGzKKRVzjD72selLDK1oPF/Oxzmt4fNcPvTDvGqSDG4tCALw==", "dev": true, "funding": [ { @@ -2678,9 +2678,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.510", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.510.tgz", - "integrity": "sha512-xPfLIPFcN/WLXBpQ/K4UgE98oUBO5Tia6BD4rkSR0wE7ep/PwBVlgvPJQrIBpmJGVAmUzwPKuDbVt9XV6+uC2g==", + "version": "1.5.33", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.33.tgz", + "integrity": "sha512-+cYTcFB1QqD4j4LegwLfpCNxifb6dDFUAwk6RsLusCwIaZI6or2f+q8rs5tTB2YC53HhOlIbEaqHMAAC8IOIwA==", "dev": true }, "node_modules/emittery": { @@ -2762,9 +2762,9 @@ } }, "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true, "engines": { "node": ">=6" @@ -3028,24 +3028,25 @@ } }, "node_modules/eslint-plugin-compat": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-4.2.0.tgz", - "integrity": 
"sha512-RDKSYD0maWy5r7zb5cWQS+uSPc26mgOzdORJ8hxILmWM7S/Ncwky7BcAtXVY5iRbKjBdHsWU8Yg7hfoZjtkv7w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-6.0.1.tgz", + "integrity": "sha512-0MeIEuoy8kWkOhW38kK8hU4vkb6l/VvyjpuYDymYOXmUY9NvTgyErF16lYuX+HPS5hkmym7lfA+XpYZiWYWmYA==", "dev": true, "dependencies": { - "@mdn/browser-compat-data": "^5.3.13", + "@mdn/browser-compat-data": "^5.5.35", "ast-metadata-inferer": "^0.8.0", - "browserslist": "^4.21.10", - "caniuse-lite": "^1.0.30001524", + "browserslist": "^4.23.1", + "caniuse-lite": "^1.0.30001639", "find-up": "^5.0.0", + "globals": "^15.7.0", "lodash.memoize": "^4.1.2", - "semver": "^7.5.4" + "semver": "^7.6.2" }, "engines": { - "node": ">=14.x" + "node": ">=18.x" }, "peerDependencies": { - "eslint": "^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0" + "eslint": "^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0" } }, "node_modules/eslint-plugin-compat/node_modules/find-up": { @@ -3064,6 +3065,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/eslint-plugin-compat/node_modules/globals": { + "version": "15.10.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-15.10.0.tgz", + "integrity": "sha512-tqFIbz83w4Y5TCbtgjZjApohbuh7K9BxGYFm7ifwDR240tvdb7P9x+/9VvUKlmkPoiknoJtanI8UOrqxS3a7lQ==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint-plugin-compat/node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -3110,13 +3123,10 @@ } }, "node_modules/eslint-plugin-compat/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -6583,9 +6593,9 @@ "dev": true }, "node_modules/node-releases": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", - "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", "dev": true }, "node_modules/normalize-path": { @@ -6839,9 +6849,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, "node_modules/picomatch": { @@ -7714,9 +7724,9 @@ } }, "node_modules/update-browserslist-db": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": 
"sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "dev": true, "funding": [ { @@ -7733,8 +7743,8 @@ } ], "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" }, "bin": { "update-browserslist-db": "cli.js" @@ -9043,9 +9053,9 @@ } }, "@mdn/browser-compat-data": { - "version": "5.3.14", - "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.3.14.tgz", - "integrity": "sha512-Y9XQrphVcE6u9xMm+gIqN86opbU/5s2W1pdPyKRyFV5B7+2jWM2gLI5JpfhZncaoDKvhy6FYwK04aCz5UM/bTQ==", + "version": "5.6.4", + "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.6.4.tgz", + "integrity": "sha512-bOOF4GGzn0exmvNHpSWmTfOXB9beTpIFCm2KPY2UVoCdn1YVfr8heuHr1C++BYI9Tun8REgi5TNVdKbBs249CA==", "dev": true }, "@nodelib/fs.scandir": { @@ -9698,15 +9708,15 @@ "dev": true }, "browserslist": { - "version": "4.21.10", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz", - "integrity": "sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ==", + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz", + "integrity": "sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==", "dev": true, "requires": { - "caniuse-lite": "^1.0.30001517", - "electron-to-chromium": "^1.4.477", - "node-releases": "^2.0.13", - "update-browserslist-db": "^1.0.11" + "caniuse-lite": "^1.0.30001663", + "electron-to-chromium": "^1.5.28", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" } }, "bs-logger": { @@ -9756,9 +9766,9 @@ "dev": true }, "caniuse-lite": { - "version": "1.0.30001528", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001528.tgz", - "integrity": "sha512-0Db4yyjR9QMNlsxh+kKWzQtkyflkG/snYheSzkjmvdEtEXB1+jt7A2HmSEiO6XIJPIbo92lHNGNySvE5pZcs5Q==", + "version": "1.0.30001667", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001667.tgz", + "integrity": "sha512-7LTwJjcRkzKFmtqGsibMeuXmvFDfZq/nzIjnmgCGzKKRVzjD72selLDK1oPF/Oxzmt4fNcPvTDvGqSDG4tCALw==", "dev": true }, "chalk": { @@ -10020,9 +10030,9 @@ } }, "electron-to-chromium": { - "version": "1.4.510", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.510.tgz", - "integrity": "sha512-xPfLIPFcN/WLXBpQ/K4UgE98oUBO5Tia6BD4rkSR0wE7ep/PwBVlgvPJQrIBpmJGVAmUzwPKuDbVt9XV6+uC2g==", + "version": "1.5.33", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.33.tgz", + "integrity": "sha512-+cYTcFB1QqD4j4LegwLfpCNxifb6dDFUAwk6RsLusCwIaZI6or2f+q8rs5tTB2YC53HhOlIbEaqHMAAC8IOIwA==", "dev": true }, "emittery": { @@ -10086,9 +10096,9 @@ } }, "escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true }, 
"escape-string-regexp": { @@ -10422,18 +10432,19 @@ } }, "eslint-plugin-compat": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-4.2.0.tgz", - "integrity": "sha512-RDKSYD0maWy5r7zb5cWQS+uSPc26mgOzdORJ8hxILmWM7S/Ncwky7BcAtXVY5iRbKjBdHsWU8Yg7hfoZjtkv7w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-6.0.1.tgz", + "integrity": "sha512-0MeIEuoy8kWkOhW38kK8hU4vkb6l/VvyjpuYDymYOXmUY9NvTgyErF16lYuX+HPS5hkmym7lfA+XpYZiWYWmYA==", "dev": true, "requires": { - "@mdn/browser-compat-data": "^5.3.13", + "@mdn/browser-compat-data": "^5.5.35", "ast-metadata-inferer": "^0.8.0", - "browserslist": "^4.21.10", - "caniuse-lite": "^1.0.30001524", + "browserslist": "^4.23.1", + "caniuse-lite": "^1.0.30001639", "find-up": "^5.0.0", + "globals": "^15.7.0", "lodash.memoize": "^4.1.2", - "semver": "^7.5.4" + "semver": "^7.6.2" }, "dependencies": { "find-up": { @@ -10446,6 +10457,12 @@ "path-exists": "^4.0.0" } }, + "globals": { + "version": "15.10.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-15.10.0.tgz", + "integrity": "sha512-tqFIbz83w4Y5TCbtgjZjApohbuh7K9BxGYFm7ifwDR240tvdb7P9x+/9VvUKlmkPoiknoJtanI8UOrqxS3a7lQ==", + "dev": true + }, "locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -10474,13 +10491,10 @@ } }, "semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true } } }, @@ -12929,9 +12943,9 @@ "dev": true }, "node-releases": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", - "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", "dev": true }, "normalize-path": { @@ -13116,9 +13130,9 @@ "dev": true }, "picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, "picomatch": { @@ -13746,13 +13760,13 @@ "dev": true }, "update-browserslist-db": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "dev": true, "requires": { - 
"escalade": "^3.1.1", - "picocolors": "^1.0.0" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" } }, "uri-js": { diff --git a/package.json b/package.json index 883888e7..1c1a2eb2 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,7 @@ "@typescript-eslint/parser": "^6.6.0", "cross-env": "^7.0.2", "eslint": "^8.48.0", - "eslint-plugin-compat": "^4.2.0", + "eslint-plugin-compat": "^6.0.1", "eslint-plugin-import": "^2.25.3", "fetch-mock": "^9.11.0", "ioredis": "^4.28.0", From f82c7c65eb295ba8efa29f4d8aad24c8605a6d44 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 7 Oct 2024 19:16:03 -0300 Subject: [PATCH 101/146] Removed Map and Set polyfills --- .../__tests__/evaluate-features.spec.ts | 9 +- src/evaluator/index.ts | 10 +- src/evaluator/matchers/semver_inlist.ts | 3 +- src/evaluator/matchers/whitelist.ts | 4 +- src/logger/__tests__/index.spec.ts | 3 +- src/logger/browser/DebugLogger.ts | 3 +- src/logger/browser/ErrorLogger.ts | 3 +- src/logger/browser/InfoLogger.ts | 3 +- src/logger/browser/WarnLogger.ts | 3 +- src/logger/index.ts | 7 +- src/services/decorateHeaders.ts | 3 +- src/storages/AbstractSplitsCacheAsync.ts | 3 +- src/storages/AbstractSplitsCacheSync.ts | 3 +- .../inLocalStorage/SplitsCacheInLocal.ts | 13 +- .../__tests__/SplitsCacheInLocal.spec.ts | 29 ++--- .../inMemory/SegmentsCacheInMemory.ts | 9 +- src/storages/inMemory/SplitsCacheInMemory.ts | 9 +- .../inMemory/UniqueKeysCacheInMemory.ts | 9 +- .../inMemory/UniqueKeysCacheInMemoryCS.ts | 9 +- .../__tests__/SplitsCacheInMemory.spec.ts | 29 ++--- src/storages/inRedis/RedisAdapter.ts | 7 +- src/storages/inRedis/SplitsCacheInRedis.ts | 6 +- src/storages/inRedis/TelemetryCacheInRedis.ts | 7 +- .../inRedis/UniqueKeysCacheInRedis.ts | 3 +- .../inRedis/__tests__/RedisAdapter.spec.ts | 7 +- .../__tests__/SplitsCacheInRedis.spec.ts | 31 +++-- .../pluggable/SegmentsCachePluggable.ts | 1 - .../pluggable/SplitsCachePluggable.ts | 6 +- .../pluggable/TelemetryCachePluggable.ts | 13 +- .../pluggable/UniqueKeysCachePluggable.ts | 3 +- .../__tests__/SplitsCachePluggable.spec.ts | 31 +++-- src/storages/pluggable/inMemoryWrapper.ts | 17 ++- src/storages/types.ts | 7 +- .../polling/updaters/splitChangesUpdater.ts | 9 +- .../streaming/__tests__/parseUtils.spec.ts | 5 +- src/sync/streaming/pushManager.ts | 7 +- src/sync/submitters/types.ts | 7 +- src/utils/LRUCache/index.ts | 5 +- src/utils/lang/__tests__/maps.spec.ts | 16 --- src/utils/lang/__tests__/sets.spec.ts | 29 +---- src/utils/lang/maps.ts | 108 ---------------- src/utils/lang/sets.ts | 122 +----------------- .../logger/builtinLogger.ts | 3 +- 43 files changed, 162 insertions(+), 452 deletions(-) delete mode 100644 src/utils/lang/__tests__/maps.spec.ts delete mode 100644 src/utils/lang/maps.ts diff --git a/src/evaluator/__tests__/evaluate-features.spec.ts b/src/evaluator/__tests__/evaluate-features.spec.ts index 45431c64..761f2804 100644 --- a/src/evaluator/__tests__/evaluate-features.spec.ts +++ b/src/evaluator/__tests__/evaluate-features.spec.ts @@ -2,7 +2,6 @@ import { evaluateFeatures, evaluateFeaturesByFlagSets } from '../index'; import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, SPLIT_NOT_FOUND } from '../../utils/labels'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; -import { _Set } from '../../utils/lang/sets'; import { WARN_FLAGSET_WITHOUT_FLAGS } from '../../logger/constants'; const splitsMock = { @@ -17,8 +16,8 @@ const splitsMock = { }; const flagSetsMock = { - reg_and_config: new _Set(['regular', 'config']), - arch_and_killed: 
new _Set(['killed', 'archived']), + reg_and_config: new Set(['regular', 'config']), + arch_and_killed: new Set(['killed', 'archived']), }; const mockStorage = { @@ -38,7 +37,7 @@ const mockStorage = { return splits; }, getNamesByFlagSets(flagSets) { - return flagSets.map(flagset => flagSetsMock[flagset] || new _Set()); + return flagSets.map(flagset => flagSetsMock[flagset] || new Set()); } } }; @@ -192,7 +191,7 @@ describe('EVALUATOR - Multiple evaluations at once by flag sets', () => { // Should support async storage too expect(await getResultsByFlagsets(['inexistent_set1', 'inexistent_set2'], { splits: { - getNamesByFlagSets(flagSets) { return Promise.resolve(flagSets.map(flagset => flagSetsMock[flagset] || new _Set())); } + getNamesByFlagSets(flagSets) { return Promise.resolve(flagSets.map(flagset => flagSetsMock[flagset] || new Set())); } } })).toEqual({}); expect(loggerMock.warn.mock.calls).toEqual([ diff --git a/src/evaluator/index.ts b/src/evaluator/index.ts index 73527d42..883df997 100644 --- a/src/evaluator/index.ts +++ b/src/evaluator/index.ts @@ -7,7 +7,7 @@ import { IStorageAsync, IStorageSync } from '../storages/types'; import { IEvaluationResult } from './types'; import { SplitIO } from '../types'; import { ILogger } from '../logger/types'; -import { ISet, setToArray, returnSetsUnion, _Set } from '../utils/lang/sets'; +import { returnSetsUnion } from '../utils/lang/sets'; import { WARN_FLAGSET_WITHOUT_FLAGS } from '../logger/constants'; const treatmentException = { @@ -97,12 +97,12 @@ export function evaluateFeaturesByFlagSets( storage: IStorageSync | IStorageAsync, method: string, ): MaybeThenable> { - let storedFlagNames: MaybeThenable[]>; + let storedFlagNames: MaybeThenable[]>; function evaluate( - featureFlagsByFlagSets: ISet[], + featureFlagsByFlagSets: Set[], ) { - let featureFlags = new _Set(); + let featureFlags = new Set(); for (let i = 0; i < flagSets.length; i++) { const featureFlagByFlagSet = featureFlagsByFlagSets[i]; if (featureFlagByFlagSet.size) { @@ -113,7 +113,7 @@ export function evaluateFeaturesByFlagSets( } return featureFlags.size ? 
- evaluateFeatures(log, key, setToArray(featureFlags), attributes, storage) : + evaluateFeatures(log, key, Array.from(featureFlags), attributes, storage) : {}; } diff --git a/src/evaluator/matchers/semver_inlist.ts b/src/evaluator/matchers/semver_inlist.ts index 6d09f7ba..c21b10d8 100644 --- a/src/evaluator/matchers/semver_inlist.ts +++ b/src/evaluator/matchers/semver_inlist.ts @@ -1,11 +1,10 @@ -import { _Set } from '../../utils/lang/sets'; import { Semver } from '../../utils/Semver'; export function inListSemverMatcherContext(ruleAttr: string[]) { // @TODO ruleAttr validation should be done at the `parser` or `matchersTransform` level to reuse for all matchers if (!ruleAttr || ruleAttr.length === 0) throw new Error('whitelistMatcherData is required for IN_LIST_SEMVER matcher type'); - const listOfSemvers = new _Set(ruleAttr.map((version) => new Semver(version).version)); + const listOfSemvers = new Set(ruleAttr.map((version) => new Semver(version).version)); return function inListSemverMatcher(runtimeAttr: string): boolean { const runtimeSemver = new Semver(runtimeAttr).version; diff --git a/src/evaluator/matchers/whitelist.ts b/src/evaluator/matchers/whitelist.ts index 082772ae..309b1540 100644 --- a/src/evaluator/matchers/whitelist.ts +++ b/src/evaluator/matchers/whitelist.ts @@ -1,7 +1,5 @@ -import { _Set } from '../../utils/lang/sets'; - export function whitelistMatcherContext(ruleAttr: string[]) { - const whitelistSet = new _Set(ruleAttr); + const whitelistSet = new Set(ruleAttr); return function whitelistMatcher(runtimeAttr: string): boolean { const isInWhitelist = whitelistSet.has(runtimeAttr); diff --git a/src/logger/__tests__/index.spec.ts b/src/logger/__tests__/index.spec.ts index 20db51e9..754acf0f 100644 --- a/src/logger/__tests__/index.spec.ts +++ b/src/logger/__tests__/index.spec.ts @@ -1,5 +1,4 @@ import { LogLevel } from '../../types'; -import { _Map } from '../../utils/lang/maps'; import { Logger, LogLevels, isLogLevelString, _sprintf } from '../index'; // We'll set this only once. These are the constants we will use for @@ -59,7 +58,7 @@ function testLogLevels(levelToTest: LogLevel) { const logMethod = levelToTest.toLowerCase(); const logCategory = `test-category-${logMethod}`; const instance = new Logger({ prefix: logCategory, showLevel }, - useCodes ? new _Map([[1, 'Test log for level %s with showLevel: %s %s']]) : undefined); + useCodes ? 
new Map([[1, 'Test log for level %s with showLevel: %s %s']]) : undefined); LOG_LEVELS_IN_ORDER.forEach((logLevel, i) => { const logMsg = `Test log for level ${levelToTest} with showLevel: ${showLevel} ${logLevelLogsCounter}`; diff --git a/src/logger/browser/DebugLogger.ts b/src/logger/browser/DebugLogger.ts index 105e1890..354a497b 100644 --- a/src/logger/browser/DebugLogger.ts +++ b/src/logger/browser/DebugLogger.ts @@ -1,7 +1,6 @@ import { Logger } from '../index'; import { codesDebug } from '../messages/debug'; -import { _Map } from '../../utils/lang/maps'; export function DebugLogger() { - return new Logger({ logLevel: 'DEBUG' }, new _Map(codesDebug)); + return new Logger({ logLevel: 'DEBUG' }, new Map(codesDebug)); } diff --git a/src/logger/browser/ErrorLogger.ts b/src/logger/browser/ErrorLogger.ts index f0702d89..4a685237 100644 --- a/src/logger/browser/ErrorLogger.ts +++ b/src/logger/browser/ErrorLogger.ts @@ -1,7 +1,6 @@ import { Logger } from '../index'; import { codesError } from '../messages/error'; -import { _Map } from '../../utils/lang/maps'; export function ErrorLogger() { - return new Logger({ logLevel: 'ERROR' }, new _Map(codesError)); + return new Logger({ logLevel: 'ERROR' }, new Map(codesError)); } diff --git a/src/logger/browser/InfoLogger.ts b/src/logger/browser/InfoLogger.ts index bdf9be75..a57d1cf2 100644 --- a/src/logger/browser/InfoLogger.ts +++ b/src/logger/browser/InfoLogger.ts @@ -1,7 +1,6 @@ import { Logger } from '../index'; import { codesInfo } from '../messages/info'; -import { _Map } from '../../utils/lang/maps'; export function InfoLogger() { - return new Logger({ logLevel: 'INFO' }, new _Map(codesInfo)); + return new Logger({ logLevel: 'INFO' }, new Map(codesInfo)); } diff --git a/src/logger/browser/WarnLogger.ts b/src/logger/browser/WarnLogger.ts index 8456d012..ebeb59ab 100644 --- a/src/logger/browser/WarnLogger.ts +++ b/src/logger/browser/WarnLogger.ts @@ -1,7 +1,6 @@ import { Logger } from '../index'; import { codesWarn } from '../messages/warn'; -import { _Map } from '../../utils/lang/maps'; export function WarnLogger() { - return new Logger({ logLevel: 'WARN' }, new _Map(codesWarn)); + return new Logger({ logLevel: 'WARN' }, new Map(codesWarn)); } diff --git a/src/logger/index.ts b/src/logger/index.ts index 136b2e58..f343ba75 100644 --- a/src/logger/index.ts +++ b/src/logger/index.ts @@ -2,7 +2,6 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { ILoggerOptions, ILogger } from './types'; import { find, isObject } from '../utils/lang'; import { LogLevel } from '../types'; -import { IMap, _Map } from '../utils/lang/maps'; export const LogLevels: { [level: string]: LogLevel } = { DEBUG: 'DEBUG', @@ -47,12 +46,12 @@ const defaultOptions = { export class Logger implements ILogger { private options: Required; - private codes: IMap; + private codes: Map; private logLevel: number; - constructor(options?: ILoggerOptions, codes?: IMap) { + constructor(options?: ILoggerOptions, codes?: Map) { this.options = objectAssign({}, defaultOptions, options); - this.codes = codes || new _Map(); + this.codes = codes || new Map(); this.logLevel = LogLevelIndexes[this.options.logLevel]; } diff --git a/src/services/decorateHeaders.ts b/src/services/decorateHeaders.ts index 5764ffab..4a95219f 100644 --- a/src/services/decorateHeaders.ts +++ b/src/services/decorateHeaders.ts @@ -1,8 +1,7 @@ import { objectAssign } from '../utils/lang/objectAssign'; -import { _Set } from '../utils/lang/sets'; import { ISettings } from '../types'; -const FORBIDDEN_HEADERS 
= new _Set([ +const FORBIDDEN_HEADERS = new Set([ 'splitsdkclientkey', 'splitsdkversion', 'splitsdkmachineip', diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts index 9e4e136c..8374c8ae 100644 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ b/src/storages/AbstractSplitsCacheAsync.ts @@ -1,7 +1,6 @@ import { ISplitsCacheAsync } from './types'; import { ISplit } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; -import { ISet } from '../utils/lang/sets'; /** * This class provides a skeletal implementation of the ISplitsCacheAsync interface @@ -18,7 +17,7 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { abstract getChangeNumber(): Promise abstract getAll(): Promise abstract getSplitNames(): Promise - abstract getNamesByFlagSets(flagSets: string[]): Promise[]> + abstract getNamesByFlagSets(flagSets: string[]): Promise[]> abstract trafficTypeExists(trafficType: string): Promise abstract clear(): Promise diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index ef44db40..92df46d5 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -1,7 +1,6 @@ import { ISplitsCacheSync } from './types'; import { ISplit } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; -import { ISet } from '../utils/lang/sets'; import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; /** @@ -80,7 +79,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { return false; } - abstract getNamesByFlagSets(flagSets: string[]): ISet[] + abstract getNamesByFlagSets(flagSets: string[]): Set[] } diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index ccd4859f..2990a094 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -4,7 +4,6 @@ import { isFiniteNumber, toNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilderCS } from '../KeyBuilderCS'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; -import { ISet, _Set, setToArray } from '../../utils/lang/sets'; import { ISettings } from '../../types'; import { getStorageHash } from '../KeyBuilder'; @@ -259,12 +258,12 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { // if the filter didn't change, nothing is done } - getNamesByFlagSets(flagSets: string[]): ISet[] { + getNamesByFlagSets(flagSets: string[]): Set[] { return flagSets.map(flagSet => { const flagSetKey = this.keys.buildFlagSetKey(flagSet); const flagSetFromLocalStorage = localStorage.getItem(flagSetKey); - return new _Set(flagSetFromLocalStorage ? JSON.parse(flagSetFromLocalStorage) : []); + return new Set(flagSetFromLocalStorage ? JSON.parse(flagSetFromLocalStorage) : []); }); } @@ -279,10 +278,10 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { const flagSetFromLocalStorage = localStorage.getItem(flagSetKey); - const flagSetCache = new _Set(flagSetFromLocalStorage ? JSON.parse(flagSetFromLocalStorage) : []); + const flagSetCache = new Set(flagSetFromLocalStorage ? 
JSON.parse(flagSetFromLocalStorage) : []); flagSetCache.add(featureFlag.name); - localStorage.setItem(flagSetKey, JSON.stringify(setToArray(flagSetCache))); + localStorage.setItem(flagSetKey, JSON.stringify(Array.from(flagSetCache))); }); } @@ -301,7 +300,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { if (!flagSetFromLocalStorage) return; - const flagSetCache = new _Set(JSON.parse(flagSetFromLocalStorage)); + const flagSetCache = new Set(JSON.parse(flagSetFromLocalStorage)); flagSetCache.delete(featureFlagName); if (flagSetCache.size === 0) { @@ -309,7 +308,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { return; } - localStorage.setItem(flagSetKey, JSON.stringify(setToArray(flagSetCache))); + localStorage.setItem(flagSetKey, JSON.stringify(Array.from(flagSetCache))); } } diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts index 732ca8b7..4d8ec076 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts @@ -2,7 +2,6 @@ import { SplitsCacheInLocal } from '../SplitsCacheInLocal'; import { KeyBuilderCS } from '../../KeyBuilderCS'; import { splitWithUserTT, splitWithAccountTT, splitWithAccountTTAndUsesSegments, something, somethingElse, featureFlagOne, featureFlagTwo, featureFlagThree, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; import { ISplit } from '../../../dtos/types'; -import { _Set } from '../../../utils/lang/sets'; import { fullSettings } from '../../../utils/settingsValidation/__tests__/settings.mocks'; @@ -174,7 +173,7 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests', () => { } } }, new KeyBuilderCS('SPLITIO', 'user')); - const emptySet = new _Set([]); + const emptySet = new Set([]); cache.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -183,21 +182,21 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests', () => { ]); cache.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(cache.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); expect(cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['1'] }); expect(cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_two'])]); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); expect(cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['x'] }); - expect(cache.getNamesByFlagSets(['x'])).toEqual([new _Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['o', 'e', 
'x'])).toEqual([new _Set(['ff_two']), new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); cache.removeSplit(featureFlagOne.name); @@ -214,7 +213,7 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests', () => { // if FlagSets are not defined, it should store all FlagSets in memory. test('SPLIT CACHE / LocalStorage / flag set cache tests without filters', () => { const cacheWithoutFilters = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS('SPLITIO', 'user')); - const emptySet = new _Set([]); + const emptySet = new Set([]); cacheWithoutFilters.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -223,12 +222,12 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests without filters', () => ]); cacheWithoutFilters.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new _Set(['ff_two', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); expect(cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); // Validate that the feature flag cache is cleared when calling `clear` method cacheWithoutFilters.clear(); diff --git a/src/storages/inMemory/SegmentsCacheInMemory.ts b/src/storages/inMemory/SegmentsCacheInMemory.ts index a7d52b7c..f3b2cef5 100644 --- a/src/storages/inMemory/SegmentsCacheInMemory.ts +++ b/src/storages/inMemory/SegmentsCacheInMemory.ts @@ -1,5 +1,4 @@ import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync'; -import { ISet, _Set } from '../../utils/lang/sets'; import { isIntegerNumber } from '../../utils/lang'; /** @@ -8,12 +7,12 @@ import { isIntegerNumber } from '../../utils/lang'; */ export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { - private segmentCache: Record> = {}; + private segmentCache: Record> = {}; private segmentChangeNumber: Record = {}; addToSegment(name: string, segmentKeys: string[]): boolean { const values = this.segmentCache[name]; - const keySet = values ? values : new _Set(); + const keySet = values ? values : new Set(); segmentKeys.forEach(k => keySet.add(k)); @@ -24,7 +23,7 @@ export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { removeFromSegment(name: string, segmentKeys: string[]): boolean { const values = this.segmentCache[name]; - const keySet = values ? values : new _Set(); + const keySet = values ? 
values : new Set(); segmentKeys.forEach(k => keySet.delete(k)); @@ -50,7 +49,7 @@ export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { private _registerSegment(name: string) { if (!this.segmentCache[name]) { - this.segmentCache[name] = new _Set(); + this.segmentCache[name] = new Set(); } return true; diff --git a/src/storages/inMemory/SplitsCacheInMemory.ts b/src/storages/inMemory/SplitsCacheInMemory.ts index 9294cc43..53da8def 100644 --- a/src/storages/inMemory/SplitsCacheInMemory.ts +++ b/src/storages/inMemory/SplitsCacheInMemory.ts @@ -1,7 +1,6 @@ import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; import { AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; import { isFiniteNumber } from '../../utils/lang'; -import { ISet, _Set } from '../../utils/lang/sets'; /** * Default ISplitsCacheSync implementation that stores split definitions in memory. @@ -14,7 +13,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { private ttCache: Record = {}; private changeNumber: number = -1; private segmentsCount: number = 0; - private flagSetsCache: Record> = {}; + private flagSetsCache: Record> = {}; constructor(splitFiltersValidation?: ISplitFiltersValidation) { super(); @@ -104,8 +103,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { return this.getChangeNumber() === -1 || this.segmentsCount > 0; } - getNamesByFlagSets(flagSets: string[]): ISet[] { - return flagSets.map(flagSet => this.flagSetsCache[flagSet] || new _Set()); + getNamesByFlagSets(flagSets: string[]): Set[] { + return flagSets.map(flagSet => this.flagSetsCache[flagSet] || new Set()); } private addToFlagSets(featureFlag: ISplit) { @@ -114,7 +113,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { if (this.flagSetsFilter.length > 0 && !this.flagSetsFilter.some(filterFlagSet => filterFlagSet === featureFlagSet)) return; - if (!this.flagSetsCache[featureFlagSet]) this.flagSetsCache[featureFlagSet] = new _Set([]); + if (!this.flagSetsCache[featureFlagSet]) this.flagSetsCache[featureFlagSet] = new Set([]); this.flagSetsCache[featureFlagSet].add(featureFlag.name); }); diff --git a/src/storages/inMemory/UniqueKeysCacheInMemory.ts b/src/storages/inMemory/UniqueKeysCacheInMemory.ts index e176aa0a..ecb468da 100644 --- a/src/storages/inMemory/UniqueKeysCacheInMemory.ts +++ b/src/storages/inMemory/UniqueKeysCacheInMemory.ts @@ -1,17 +1,16 @@ import { IUniqueKeysCacheBase } from '../types'; -import { ISet, setToArray, _Set } from '../../utils/lang/sets'; import { UniqueKeysPayloadSs } from '../../sync/submitters/types'; import { DEFAULT_CACHE_SIZE } from '../inRedis/constants'; /** * Converts `uniqueKeys` data from cache into request payload for SS. 
*/ -export function fromUniqueKeysCollector(uniqueKeys: { [featureName: string]: ISet }): UniqueKeysPayloadSs { +export function fromUniqueKeysCollector(uniqueKeys: { [featureName: string]: Set }): UniqueKeysPayloadSs { const payload = []; const featureNames = Object.keys(uniqueKeys); for (let i = 0; i < featureNames.length; i++) { const featureName = featureNames[i]; - const userKeys = setToArray(uniqueKeys[featureName]); + const userKeys = Array.from(uniqueKeys[featureName]); const uniqueKeysPayload = { f: featureName, ks: userKeys @@ -27,7 +26,7 @@ export class UniqueKeysCacheInMemory implements IUniqueKeysCacheBase { protected onFullQueue?: () => void; private readonly maxStorage: number; private uniqueTrackerSize = 0; - protected uniqueKeysTracker: { [featureName: string]: ISet } = {}; + protected uniqueKeysTracker: { [featureName: string]: Set } = {}; constructor(uniqueKeysQueueSize = DEFAULT_CACHE_SIZE) { this.maxStorage = uniqueKeysQueueSize; @@ -41,7 +40,7 @@ export class UniqueKeysCacheInMemory implements IUniqueKeysCacheBase { * Store unique keys per feature. */ track(userKey: string, featureName: string) { - if (!this.uniqueKeysTracker[featureName]) this.uniqueKeysTracker[featureName] = new _Set(); + if (!this.uniqueKeysTracker[featureName]) this.uniqueKeysTracker[featureName] = new Set(); const tracker = this.uniqueKeysTracker[featureName]; if (!tracker.has(userKey)) { tracker.add(userKey); diff --git a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts index 66f54d0c..54b946e0 100644 --- a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts +++ b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts @@ -1,5 +1,4 @@ import { IUniqueKeysCacheBase } from '../types'; -import { ISet, setToArray, _Set } from '../../utils/lang/sets'; import { UniqueKeysPayloadCs } from '../../sync/submitters/types'; import { DEFAULT_CACHE_SIZE } from '../inRedis/constants'; @@ -8,7 +7,7 @@ export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { private onFullQueue?: () => void; private readonly maxStorage: number; private uniqueTrackerSize = 0; - private uniqueKeysTracker: { [userKey: string]: ISet } = {}; + private uniqueKeysTracker: { [userKey: string]: Set } = {}; /** * @@ -28,7 +27,7 @@ export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { */ track(userKey: string, featureName: string) { - if (!this.uniqueKeysTracker[userKey]) this.uniqueKeysTracker[userKey] = new _Set(); + if (!this.uniqueKeysTracker[userKey]) this.uniqueKeysTracker[userKey] = new Set(); const tracker = this.uniqueKeysTracker[userKey]; if (!tracker.has(featureName)) { tracker.add(featureName); @@ -66,12 +65,12 @@ export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { /** * Converts `uniqueKeys` data from cache into request payload. 
*/ - private fromUniqueKeysCollector(uniqueKeys: { [userKey: string]: ISet }): UniqueKeysPayloadCs { + private fromUniqueKeysCollector(uniqueKeys: { [userKey: string]: Set }): UniqueKeysPayloadCs { const payload = []; const userKeys = Object.keys(uniqueKeys); for (let k = 0; k < userKeys.length; k++) { const userKey = userKeys[k]; - const featureNames = setToArray(uniqueKeys[userKey]); + const featureNames = Array.from(uniqueKeys[userKey]); const uniqueKeysPayload = { k: userKey, fs: featureNames diff --git a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts index 14fa62fd..62812586 100644 --- a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts @@ -1,7 +1,6 @@ import { SplitsCacheInMemory } from '../SplitsCacheInMemory'; import { ISplit } from '../../../dtos/types'; import { splitWithUserTT, splitWithAccountTT, something, somethingElse, featureFlagWithEmptyFS, featureFlagWithoutFS, featureFlagOne, featureFlagTwo, featureFlagThree } from '../../__tests__/testUtils'; -import { _Set } from '../../../utils/lang/sets'; test('SPLITS CACHE / In Memory', () => { const cache = new SplitsCacheInMemory(); @@ -118,7 +117,7 @@ test('SPLITS CACHE / In Memory / killLocally', () => { test('SPLITS CACHE / In Memory / flag set cache tests', () => { // @ts-ignore const cache = new SplitsCacheInMemory({ groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); - const emptySet = new _Set([]); + const emptySet = new Set([]); cache.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -127,21 +126,21 @@ test('SPLITS CACHE / In Memory / flag set cache tests', () => { ]); cache.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(cache.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); expect(cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['1'] }); expect(cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_two'])]); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); expect(cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['x'] }); - expect(cache.getNamesByFlagSets(['x'])).toEqual([new _Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new _Set(['ff_two']), new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); cache.removeSplit(featureFlagOne.name); @@ -158,7 
+157,7 @@ test('SPLITS CACHE / In Memory / flag set cache tests', () => { // if FlagSets are not defined, it should store all FlagSets in memory. test('SPLIT CACHE / LocalStorage / flag set cache tests without filters', () => { const cacheWithoutFilters = new SplitsCacheInMemory(); - const emptySet = new _Set([]); + const emptySet = new Set([]); cacheWithoutFilters.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -167,10 +166,10 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests without filters', () => ]); cacheWithoutFilters.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new _Set(['ff_two', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); expect(cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); }); diff --git a/src/storages/inRedis/RedisAdapter.ts b/src/storages/inRedis/RedisAdapter.ts index 6d738606..049f9e78 100644 --- a/src/storages/inRedis/RedisAdapter.ts +++ b/src/storages/inRedis/RedisAdapter.ts @@ -1,7 +1,6 @@ import ioredis, { Pipeline } from 'ioredis'; import { ILogger } from '../../logger/types'; import { merge, isString } from '../../utils/lang'; -import { _Set, setToArray, ISet } from '../../utils/lang/sets'; import { thenable } from '../../utils/promise/thenable'; import { timeout } from '../../utils/promise/timeout'; @@ -37,7 +36,7 @@ export class RedisAdapter extends ioredis { private readonly log: ILogger; private _options: object; private _notReadyCommandsQueue?: IRedisCommand[]; - private _runningCommands: ISet>; + private _runningCommands: Set>; constructor(log: ILogger, storageSettings: Record = {}) { const options = RedisAdapter._defineOptions(storageSettings); @@ -47,7 +46,7 @@ export class RedisAdapter extends ioredis { this.log = log; this._options = options; this._notReadyCommandsQueue = []; - this._runningCommands = new _Set(); + this._runningCommands = new Set(); this._listenToEvents(); this._setTimeoutWrappers(); this._setDisconnectWrapper(); @@ -150,7 +149,7 @@ export class RedisAdapter extends ioredis { if (instance._runningCommands.size > 0) { instance.log.info(LOG_PREFIX + `Attempting to disconnect but there are ${instance._runningCommands.size} commands still waiting for resolution. 
Defering disconnection until those finish.`); - Promise.all(setToArray(instance._runningCommands)) + Promise.all(Array.from(instance._runningCommands)) .then(() => { instance.log.debug(LOG_PREFIX + 'Pending commands finished successfully, disconnecting.'); originalMethod.apply(instance, params); diff --git a/src/storages/inRedis/SplitsCacheInRedis.ts b/src/storages/inRedis/SplitsCacheInRedis.ts index 8822647e..428efb94 100644 --- a/src/storages/inRedis/SplitsCacheInRedis.ts +++ b/src/storages/inRedis/SplitsCacheInRedis.ts @@ -4,7 +4,7 @@ import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; -import { ISet, _Set, returnDifference } from '../../utils/lang/sets'; +import { returnDifference } from '../../utils/lang/sets'; import type { RedisAdapter } from './RedisAdapter'; /** @@ -215,14 +215,14 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * The returned promise is resolved with the list of feature flag names per flag set, * or rejected if the pipelined redis operation fails (e.g., timeout). */ - getNamesByFlagSets(flagSets: string[]): Promise[]> { + getNamesByFlagSets(flagSets: string[]): Promise[]> { return this.redis.pipeline(flagSets.map(flagSet => ['smembers', this.keys.buildFlagSetKey(flagSet)])).exec() .then((results) => results.map(([e, value], index) => { if (e === null) return value; this.log.error(LOG_PREFIX + `Could not read result from get members of flag set ${flagSets[index]} due to an error: ${e}`); })) - .then(namesByFlagSets => namesByFlagSets.map(namesByFlagSet => new _Set(namesByFlagSet))); + .then(namesByFlagSets => namesByFlagSets.map(namesByFlagSet => new Set(namesByFlagSet))); } /** diff --git a/src/storages/inRedis/TelemetryCacheInRedis.ts b/src/storages/inRedis/TelemetryCacheInRedis.ts index 78108c3d..9cb44711 100644 --- a/src/storages/inRedis/TelemetryCacheInRedis.ts +++ b/src/storages/inRedis/TelemetryCacheInRedis.ts @@ -6,7 +6,6 @@ import { findLatencyIndex } from '../findLatencyIndex'; import { getTelemetryConfigStats } from '../../sync/submitters/telemetrySubmitter'; import { CONSUMER_MODE, STORAGE_REDIS } from '../../utils/constants'; import { isNaNNumber, isString } from '../../utils/lang'; -import { _Map } from '../../utils/lang/maps'; import { MAX_LATENCY_BUCKET_COUNT, newBuckets } from '../inMemory/TelemetryCacheInMemory'; import { parseLatencyField, parseExceptionField, parseMetadata } from '../utils'; import type { RedisAdapter } from './RedisAdapter'; @@ -46,7 +45,7 @@ export class TelemetryCacheInRedis implements ITelemetryCacheAsync { popLatencies(): Promise { return this.redis.hgetall(this.keys.latencyPrefix).then(latencies => { - const result: MultiMethodLatencies = new _Map(); + const result: MultiMethodLatencies = new Map(); Object.keys(latencies).forEach(field => { @@ -86,7 +85,7 @@ export class TelemetryCacheInRedis implements ITelemetryCacheAsync { popExceptions(): Promise { return this.redis.hgetall(this.keys.exceptionPrefix).then(exceptions => { - const result: MultiMethodExceptions = new _Map(); + const result: MultiMethodExceptions = new Map(); Object.keys(exceptions).forEach(field => { @@ -119,7 +118,7 @@ export class TelemetryCacheInRedis implements ITelemetryCacheAsync { popConfigs(): Promise { return this.redis.hgetall(this.keys.initPrefix).then(configs => { - const result: MultiConfigs = new _Map(); + const result: MultiConfigs = new Map(); 
Object.keys(configs).forEach(field => { diff --git a/src/storages/inRedis/UniqueKeysCacheInRedis.ts b/src/storages/inRedis/UniqueKeysCacheInRedis.ts index 6abdb88a..50651314 100644 --- a/src/storages/inRedis/UniqueKeysCacheInRedis.ts +++ b/src/storages/inRedis/UniqueKeysCacheInRedis.ts @@ -1,6 +1,5 @@ import { IUniqueKeysCacheBase } from '../types'; import { UniqueKeysCacheInMemory } from '../inMemory/UniqueKeysCacheInMemory'; -import { setToArray } from '../../utils/lang/sets'; import { DEFAULT_CACHE_SIZE, REFRESH_RATE, TTL_REFRESH } from './constants'; import { LOG_PREFIX } from './constants'; import { ILogger } from '../../logger/types'; @@ -29,7 +28,7 @@ export class UniqueKeysCacheInRedis extends UniqueKeysCacheInMemory implements I if (!featureNames.length) return Promise.resolve(false); const uniqueKeysArray = featureNames.map((featureName) => { - const featureKeys = setToArray(this.uniqueKeysTracker[featureName]); + const featureKeys = Array.from(this.uniqueKeysTracker[featureName]); const uniqueKeysPayload = { f: featureName, ks: featureKeys diff --git a/src/storages/inRedis/__tests__/RedisAdapter.spec.ts b/src/storages/inRedis/__tests__/RedisAdapter.spec.ts index a8ef69da..6668803c 100644 --- a/src/storages/inRedis/__tests__/RedisAdapter.spec.ts +++ b/src/storages/inRedis/__tests__/RedisAdapter.spec.ts @@ -2,7 +2,6 @@ import forEach from 'lodash/forEach'; import merge from 'lodash/merge'; import reduce from 'lodash/reduce'; -import { _Set, setToArray } from '../../../utils/lang/sets'; // Mocking sdkLogger import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; @@ -95,7 +94,7 @@ describe('STORAGE Redis Adapter', () => { expect(typeof instance._options === 'object').toBe(true); // The instance will have an options object. expect(Array.isArray(instance._notReadyCommandsQueue)).toBe(true); // The instance will have an array as the _notReadyCommandsQueue property. - expect(instance._runningCommands instanceof _Set).toBe(true); // The instance will have a set as the _runningCommands property. + expect(instance._runningCommands instanceof Set).toBe(true); // The instance will have a set as the _runningCommands property. }); test('ioredis constructor params and static method _defineLibrarySettings', () => { @@ -374,7 +373,7 @@ describe('STORAGE Redis Adapter', () => { setTimeout(() => { // queued with rejection timeout wrapper expect(loggerMock.info.mock.calls).toEqual([[LOG_PREFIX + 'Attempting to disconnect but there are 2 commands still waiting for resolution. Defering disconnection until those finish.']]); - Promise.all(setToArray(instance._runningCommands)).catch(e => { + Promise.all(Array.from(instance._runningCommands)).catch(e => { setImmediate(() => { // Allow the callback to execute before checking. expect(loggerMock.warn.mock.calls[0]).toEqual([`${LOG_PREFIX}Pending commands finished with error: ${e}. Proceeding with disconnection.`]); // Should warn about the error but tell user that will disconnect anyways. expect(ioredisMock.disconnect).toBeCalledTimes(1); // Original method should have been called once, asynchronously @@ -394,7 +393,7 @@ describe('STORAGE Redis Adapter', () => { setTimeout(() => { expect(loggerMock.info.mock.calls).toEqual([[LOG_PREFIX + 'Attempting to disconnect but there are 4 commands still waiting for resolution. 
Defering disconnection until those finish.']]); - Promise.all(setToArray(instance._runningCommands)).then(() => { // This one will go through success path + Promise.all(Array.from(instance._runningCommands)).then(() => { // This one will go through success path setImmediate(() => { expect(loggerMock.debug.mock.calls).toEqual([[LOG_PREFIX + 'Pending commands finished successfully, disconnecting.']]); expect(ioredisMock.disconnect).toBeCalledTimes(1); // Original method should have been called once, asynchronously diff --git a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts b/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts index d10db711..3f577254 100644 --- a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts +++ b/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts @@ -4,7 +4,6 @@ import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { splitWithUserTT, splitWithAccountTT, featureFlagOne, featureFlagThree, featureFlagTwo, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; import { ISplit } from '../../../dtos/types'; import { metadata } from '../../__tests__/KeyBuilder.spec'; -import { _Set } from '../../../utils/lang/sets'; import { RedisAdapter } from '../RedisAdapter'; const prefix = 'splits_cache_ut'; @@ -150,7 +149,7 @@ describe('SPLITS CACHE REDIS', () => { const connection = new RedisAdapter(loggerMock); // @ts-ignore const cache = new SplitsCacheInRedis(loggerMock, keysBuilder, connection, { groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); - const emptySet = new _Set([]); + const emptySet = new Set([]); await cache.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -159,27 +158,27 @@ describe('SPLITS CACHE REDIS', () => { ]); await cache.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(await cache.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); expect(await cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); await cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['1'] }); expect(await cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_two'])]); + expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); expect(await cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); await cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['x'] }); - expect(await cache.getNamesByFlagSets(['x'])).toEqual([new _Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new _Set(['ff_two']), new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); + expect(await 
cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); // @ts-ignore Simulate an error in connection.pipeline().exec() jest.spyOn(connection, 'pipeline').mockImplementationOnce(() => { return { exec: () => Promise.resolve([['error', null], [null, ['ff_three']], [null, ['ff_one']]]) }; }); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new Set(['ff_three']), new Set(['ff_one'])]); (connection.pipeline as jest.Mock).mockRestore(); await cache.removeSplit(featureFlagOne.name); @@ -203,7 +202,7 @@ describe('SPLITS CACHE REDIS', () => { const connection = new RedisAdapter(loggerMock); const cacheWithoutFilters = new SplitsCacheInRedis(loggerMock, keysBuilder, connection); - const emptySet = new _Set([]); + const emptySet = new Set([]); await cacheWithoutFilters.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -212,12 +211,12 @@ describe('SPLITS CACHE REDIS', () => { ]); await cacheWithoutFilters.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new _Set(['ff_two', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); expect(await cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); // Delete splits, TT and flag set keys await cacheWithoutFilters.removeSplits([featureFlagThree.name, featureFlagTwo.name, featureFlagOne.name, featureFlagWithEmptyFS.name]); diff --git a/src/storages/pluggable/SegmentsCachePluggable.ts b/src/storages/pluggable/SegmentsCachePluggable.ts index 995c66df..0ec44588 100644 --- a/src/storages/pluggable/SegmentsCachePluggable.ts +++ b/src/storages/pluggable/SegmentsCachePluggable.ts @@ -5,7 +5,6 @@ import { KeyBuilderSS } from '../KeyBuilderSS'; import { IPluggableStorageWrapper, ISegmentsCacheAsync } from '../types'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; -import { _Set } from '../../utils/lang/sets'; /** * ISegmentsCacheAsync implementation for pluggable storages. 
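
Every hunk above and below applies the same mechanical substitution: the `_Set`/`_Map` ponyfills and the `setToArray` helper from `utils/lang` are replaced by the native `Set`/`Map` built-ins and `Array.from`, and the `ISet<T>`/`IMap<K, V>` annotations become `Set<T>`/`Map<K, V>`. A minimal before/after sketch of the pattern follows; the variable names and the exact import path are illustrative only and are not part of the patch.

// Before: ponyfill-based set handling
// import { _Set, setToArray, ISet } from '../utils/lang/sets';
// const flagSetNames: ISet<string> = new _Set(['ff_one', 'ff_two']);
// const payload: string[] = setToArray(flagSetNames);

// After: native ES6 collections
const flagSetNames: Set<string> = new Set(['ff_one', 'ff_two']);
const payload: string[] = Array.from(flagSetNames);
console.log(payload); // ['ff_one', 'ff_two']

Membership checks (`has`), insertion order and array serialization behave identically on platforms that provide native `Set`/`Map`, so the swap mainly drops the bundled ponyfill code at the cost of requiring native ES6 collections in the runtime.
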
diff --git a/src/storages/pluggable/SplitsCachePluggable.ts b/src/storages/pluggable/SplitsCachePluggable.ts index d35299f6..c2bc17fa 100644 --- a/src/storages/pluggable/SplitsCachePluggable.ts +++ b/src/storages/pluggable/SplitsCachePluggable.ts @@ -5,7 +5,7 @@ import { ILogger } from '../../logger/types'; import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; import { LOG_PREFIX } from './constants'; import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; -import { ISet, _Set, returnDifference } from '../../utils/lang/sets'; +import { returnDifference } from '../../utils/lang/sets'; /** * ISplitsCacheAsync implementation for pluggable storages. @@ -181,11 +181,11 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved with the list of feature flag names per flag set. * It never rejects (If there is a wrapper error for some flag set, an empty set is returned for it). */ - getNamesByFlagSets(flagSets: string[]): Promise[]> { + getNamesByFlagSets(flagSets: string[]): Promise[]> { return Promise.all(flagSets.map(flagSet => { const flagSetKey = this.keys.buildFlagSetKey(flagSet); return this.wrapper.getItems(flagSetKey).catch(() => []); - })).then(namesByFlagSets => namesByFlagSets.map(namesByFlagSet => new _Set(namesByFlagSet))); + })).then(namesByFlagSets => namesByFlagSets.map(namesByFlagSet => new Set(namesByFlagSet))); } /** diff --git a/src/storages/pluggable/TelemetryCachePluggable.ts b/src/storages/pluggable/TelemetryCachePluggable.ts index 5f459f10..995fc6b0 100644 --- a/src/storages/pluggable/TelemetryCachePluggable.ts +++ b/src/storages/pluggable/TelemetryCachePluggable.ts @@ -6,7 +6,6 @@ import { findLatencyIndex } from '../findLatencyIndex'; import { getTelemetryConfigStats } from '../../sync/submitters/telemetrySubmitter'; import { CONSUMER_MODE, STORAGE_PLUGGABLE } from '../../utils/constants'; import { isString, isNaNNumber } from '../../utils/lang'; -import { _Map } from '../../utils/lang/maps'; import { MAX_LATENCY_BUCKET_COUNT, newBuckets } from '../inMemory/TelemetryCacheInMemory'; import { parseLatencyField, parseExceptionField, parseMetadata } from '../utils'; @@ -43,7 +42,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return latencyKeys.length ? this.wrapper.getMany(latencyKeys).then(latencies => { - const result: MultiMethodLatencies = new _Map(); + const result: MultiMethodLatencies = new Map(); for (let i = 0; i < latencyKeys.length; i++) { const field = latencyKeys[i].split('::')[1]; @@ -77,7 +76,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return Promise.all(latencyKeys.map((latencyKey) => this.wrapper.del(latencyKey))).then(() => result); }) : // If latencyKeys is empty, return an empty map. - new _Map(); + new Map(); }); } @@ -90,7 +89,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return exceptionKeys.length ? this.wrapper.getMany(exceptionKeys).then(exceptions => { - const result: MultiMethodExceptions = new _Map(); + const result: MultiMethodExceptions = new Map(); for (let i = 0; i < exceptionKeys.length; i++) { const field = exceptionKeys[i].split('::')[1]; @@ -117,7 +116,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return Promise.all(exceptionKeys.map((exceptionKey) => this.wrapper.del(exceptionKey))).then(() => result); }) : // If exceptionKeys is empty, return an empty map. 
- new _Map(); + new Map(); }); } @@ -130,7 +129,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return configKeys.length ? this.wrapper.getMany(configKeys).then(configs => { - const result: MultiConfigs = new _Map(); + const result: MultiConfigs = new Map(); for (let i = 0; i < configKeys.length; i++) { const field = configKeys[i].split('::')[1]; @@ -154,7 +153,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return Promise.all(configKeys.map((configKey) => this.wrapper.del(configKey))).then(() => result); }) : // If configKeys is empty, return an empty map. - new _Map(); + new Map(); }); } } diff --git a/src/storages/pluggable/UniqueKeysCachePluggable.ts b/src/storages/pluggable/UniqueKeysCachePluggable.ts index d430682e..ae46171d 100644 --- a/src/storages/pluggable/UniqueKeysCachePluggable.ts +++ b/src/storages/pluggable/UniqueKeysCachePluggable.ts @@ -1,6 +1,5 @@ import { IPluggableStorageWrapper, IUniqueKeysCacheBase } from '../types'; import { UniqueKeysCacheInMemory } from '../inMemory/UniqueKeysCacheInMemory'; -import { setToArray } from '../../utils/lang/sets'; import { DEFAULT_CACHE_SIZE, REFRESH_RATE } from '../inRedis/constants'; import { LOG_PREFIX } from './constants'; import { ILogger } from '../../logger/types'; @@ -28,7 +27,7 @@ export class UniqueKeysCachePluggable extends UniqueKeysCacheInMemory implements if (!featureNames.length) return Promise.resolve(false); const uniqueKeysArray = featureNames.map((featureName) => { - const featureKeys = setToArray(this.uniqueKeysTracker[featureName]); + const featureKeys = Array.from(this.uniqueKeysTracker[featureName]); const uniqueKeysPayload = { f: featureName, ks: featureKeys diff --git a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts b/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts index ea8aa73e..57fc34b3 100644 --- a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts +++ b/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts @@ -4,7 +4,6 @@ import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { wrapperMockFactory } from './wrapper.mock'; import { splitWithUserTT, splitWithAccountTT, featureFlagOne, featureFlagThree, featureFlagTwo, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; import { ISplit } from '../../../dtos/types'; -import { _Set } from '../../../utils/lang/sets'; const keysBuilder = new KeyBuilder(); @@ -154,7 +153,7 @@ describe('SPLITS CACHE PLUGGABLE', () => { test('flag set cache tests', async () => { const wrapper = wrapperMockFactory(); // @ts-ignore const cache = new SplitsCachePluggable(loggerMock, keysBuilder, wrapper, { groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); - const emptySet = new _Set([]); + const emptySet = new Set([]); await cache.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -163,25 +162,25 @@ describe('SPLITS CACHE PLUGGABLE', () => { ]); await cache.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(await cache.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 
'ff_three'])]); expect(await cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); await cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['1'] }); expect(await cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_two'])]); + expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); expect(await cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); await cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['x'] }); - expect(await cache.getNamesByFlagSets(['x'])).toEqual([new _Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new _Set(['ff_two']), new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); // Simulate one error in getItems wrapper.getItems.mockImplementationOnce(() => Promise.reject('error')); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new Set(['ff_three']), new Set(['ff_one'])]); await cache.removeSplit(featureFlagOne.name); expect(await cache.getNamesByFlagSets(['x'])).toEqual([emptySet]); @@ -197,7 +196,7 @@ describe('SPLITS CACHE PLUGGABLE', () => { // if FlagSets filter is not defined, it should store all FlagSets in memory. 
test('flag set cache tests without filters', async () => { const cacheWithoutFilters = new SplitsCachePluggable(loggerMock, keysBuilder, wrapperMockFactory()); - const emptySet = new _Set([]); + const emptySet = new Set([]); await cacheWithoutFilters.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -206,12 +205,12 @@ describe('SPLITS CACHE PLUGGABLE', () => { ]); await cacheWithoutFilters.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new _Set(['ff_two', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); expect(await cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); }); }); diff --git a/src/storages/pluggable/inMemoryWrapper.ts b/src/storages/pluggable/inMemoryWrapper.ts index 7d8a8837..afc00285 100644 --- a/src/storages/pluggable/inMemoryWrapper.ts +++ b/src/storages/pluggable/inMemoryWrapper.ts @@ -1,6 +1,5 @@ import { IPluggableStorageWrapper } from '../types'; import { startsWith, toNumber } from '../../utils/lang'; -import { ISet, setToArray, _Set } from '../../utils/lang/sets'; /** * Creates a IPluggableStorageWrapper implementation that stores items in memory. @@ -9,9 +8,9 @@ import { ISet, setToArray, _Set } from '../../utils/lang/sets'; * * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves immediately. 
*/ -export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWrapper & { _cache: Record>, _setConnDelay(connDelay: number): void } { +export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWrapper & { _cache: Record>, _setConnDelay(connDelay: number): void } { - let _cache: Record> = {}; + let _cache: Record> = {}; let _connDelay = connDelay; return { @@ -84,22 +83,22 @@ export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWra itemContains(key: string, item: string) { const set = _cache[key]; if (!set) return Promise.resolve(false); - if (set instanceof _Set) return Promise.resolve(set.has(item)); + if (set instanceof Set) return Promise.resolve(set.has(item)); return Promise.reject('key is not a set'); }, addItems(key: string, items: string[]) { - if (!(key in _cache)) _cache[key] = new _Set(); + if (!(key in _cache)) _cache[key] = new Set(); const set = _cache[key]; - if (set instanceof _Set) { + if (set instanceof Set) { items.forEach(item => set.add(item)); return Promise.resolve(); } return Promise.reject('key is not a set'); }, removeItems(key: string, items: string[]) { - if (!(key in _cache)) _cache[key] = new _Set(); + if (!(key in _cache)) _cache[key] = new Set(); const set = _cache[key]; - if (set instanceof _Set) { + if (set instanceof Set) { items.forEach(item => set.delete(item)); return Promise.resolve(); } @@ -108,7 +107,7 @@ export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWra getItems(key: string) { const set = _cache[key]; if (!set) return Promise.resolve([]); - if (set instanceof _Set) return Promise.resolve(setToArray(set)); + if (set instanceof Set) return Promise.resolve(Array.from(set)); return Promise.reject('key is not a set'); }, diff --git a/src/storages/types.ts b/src/storages/types.ts index b3b1076c..a345ac7d 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -2,7 +2,6 @@ import { MaybeThenable, ISplit, IMySegmentsResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { SplitIO, ImpressionDTO, ISettings } from '../types'; -import { ISet } from '../utils/lang/sets'; /** * Interface of a pluggable storage wrapper. @@ -211,7 +210,7 @@ export interface ISplitsCacheBase { // should never reject or throw an exception. Instead return false by default, to avoid emitting SDK_READY_FROM_CACHE. 
checkCache(): MaybeThenable, killLocally(name: string, defaultTreatment: string, changeNumber: number): MaybeThenable, - getNamesByFlagSets(flagSets: string[]): MaybeThenable[]> + getNamesByFlagSets(flagSets: string[]): MaybeThenable[]> } export interface ISplitsCacheSync extends ISplitsCacheBase { @@ -228,7 +227,7 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { clear(): void, checkCache(): boolean, killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean, - getNamesByFlagSets(flagSets: string[]): ISet[] + getNamesByFlagSets(flagSets: string[]): Set[] } export interface ISplitsCacheAsync extends ISplitsCacheBase { @@ -245,7 +244,7 @@ export interface ISplitsCacheAsync extends ISplitsCacheBase { clear(): Promise, checkCache(): Promise, killLocally(name: string, defaultTreatment: string, changeNumber: number): Promise, - getNamesByFlagSets(flagSets: string[]): Promise[]> + getNamesByFlagSets(flagSets: string[]): Promise[]> } /** Segments cache */ diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/splitChangesUpdater.ts index 669a2010..a125c8e2 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/splitChangesUpdater.ts @@ -1,4 +1,3 @@ -import { _Set, setToArray, ISet } from '../../../utils/lang/sets'; import { ISegmentsCacheBase, ISplitsCacheBase } from '../../../storages/types'; import { ISplitChangesFetcher } from '../fetchers/types'; import { ISplit, ISplitChangesResponse, ISplitFiltersValidation } from '../../../dtos/types'; @@ -27,8 +26,8 @@ function checkAllSegmentsExist(segments: ISegmentsCacheBase): Promise { * Collect segments from a raw split definition. * Exported for testing purposes. */ -export function parseSegments({ conditions }: ISplit): ISet { - let segments = new _Set(); +export function parseSegments({ conditions }: ISplit): Set { + let segments = new Set(); for (let i = 0; i < conditions.length; i++) { const matchers = conditions[i].matcherGroup.matchers; @@ -74,7 +73,7 @@ function matchFilters(featureFlag: ISplit, filters: ISplitFiltersValidation) { * Exported for testing purposes. 
*/ export function computeSplitsMutation(entries: ISplit[], filters: ISplitFiltersValidation): ISplitMutations { - const segments = new _Set(); + const segments = new Set(); const computed = entries.reduce((accum, split) => { if (split.status === 'ACTIVE' && matchFilters(split, filters)) { accum.added.push([split.name, split]); @@ -89,7 +88,7 @@ export function computeSplitsMutation(entries: ISplit[], filters: ISplitFiltersV return accum; }, { added: [], removed: [], segments: [] } as ISplitMutations); - computed.segments = setToArray(segments); + computed.segments = Array.from(segments); return computed; } diff --git a/src/sync/streaming/__tests__/parseUtils.spec.ts b/src/sync/streaming/__tests__/parseUtils.spec.ts index a1501917..a12a0a4e 100644 --- a/src/sync/streaming/__tests__/parseUtils.spec.ts +++ b/src/sync/streaming/__tests__/parseUtils.spec.ts @@ -2,7 +2,6 @@ import { hash64 } from '../../../utils/murmur3/murmur3_64'; import { keylists, bitmaps, splitNotifications } from './dataMocks'; import { parseKeyList, parseBitmap, isInBitmap, parseFFUpdatePayload, getDelay } from '../parseUtils'; -import { _Set } from '../../../utils/lang/sets'; test('parseKeyList', () => { keylists.forEach(keylist => { @@ -10,8 +9,8 @@ test('parseKeyList', () => { expect(parseKeyList(keyListDataCompressed, compression)).toEqual(keyListData); // decompress KeyList - const added = new _Set(keyListData.a); - const removed = new _Set(keyListData.r); + const added = new Set(keyListData.a); + const removed = new Set(keyListData.r); addedUserKeys.forEach(userKey => { const hash = hash64(userKey); diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 01eeeffc..d089a167 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -15,7 +15,6 @@ import { MEMBERSHIPS_MS_UPDATE, MEMBERSHIPS_LS_UPDATE, PUSH_NONRETRYABLE_ERROR, import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MEMBERSHIPS_UPDATE, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; import { IMembershipMSUpdateData, IMembershipLSUpdateData, KeyList, UpdateStrategy } from './SSEHandler/types'; import { getDelay, isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; -import { ISet, _Set } from '../../utils/lang/sets'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; import { TOKEN_REFRESH, AUTH_REJECTION } from '../../utils/constants'; @@ -254,11 +253,11 @@ export function pushManagerFactory( return; } case UpdateStrategy.KeyList: { - let keyList: KeyList, added: ISet, removed: ISet; + let keyList: KeyList, added: Set, removed: Set; try { keyList = parseKeyList(parsedData.d!, parsedData.c!); - added = new _Set(keyList.a); - removed = new _Set(keyList.r); + added = new Set(keyList.a); + removed = new Set(keyList.r); } catch (e) { log.warn(STREAMING_PARSING_MEMBERSHIPS_UPDATE, ['KeyList', e]); break; diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index 8aa61c2b..d1629c34 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -1,7 +1,6 @@ /* eslint-disable no-use-before-define */ import { IMetadata } from '../../dtos/types'; import { SplitIO } from '../../types'; -import { IMap } from '../../utils/lang/maps'; import { ISyncTask } from '../types'; export type ImpressionsPayload = { @@ -88,11 +87,11 @@ 
export type StoredEventWithMetadata = { e: SplitIO.EventData } -export type MultiMethodLatencies = IMap +export type MultiMethodLatencies = Map -export type MultiMethodExceptions = IMap +export type MultiMethodExceptions = Map -export type MultiConfigs = IMap +export type MultiConfigs = Map /** * Telemetry usage stats diff --git a/src/utils/LRUCache/index.ts b/src/utils/LRUCache/index.ts index edf1b59b..2d1a0ec5 100644 --- a/src/utils/LRUCache/index.ts +++ b/src/utils/LRUCache/index.ts @@ -1,14 +1,13 @@ -import { IMap, _Map } from '../lang/maps'; import { LinkedList, Node } from './LinkedList'; export class LRUCache { maxLen: number; - items: IMap>; + items: Map>; lru: LinkedList<{ key: K, value: V }>; constructor(maxSize?: number) { this.maxLen = maxSize || 1; - this.items = new _Map(); + this.items = new Map(); this.lru = new LinkedList(); } diff --git a/src/utils/lang/__tests__/maps.spec.ts b/src/utils/lang/__tests__/maps.spec.ts deleted file mode 100644 index 02312d50..00000000 --- a/src/utils/lang/__tests__/maps.spec.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { __getMapConstructor, MapPoly } from '../maps'; - -test('__getMapConstructor', () => { - - // should return global Map constructor if available - expect(__getMapConstructor()).toBe(global.Map); - - const originalMap = global.Map; // @ts-ignore - global.Map = undefined; // overwrite global Map - - // should return Map polyfill if global Map constructor is not available - expect(__getMapConstructor()).toBe(MapPoly); - - global.Map = originalMap; // restore original global Map - -}); diff --git a/src/utils/lang/__tests__/sets.spec.ts b/src/utils/lang/__tests__/sets.spec.ts index 1cb99853..4745e6b7 100644 --- a/src/utils/lang/__tests__/sets.spec.ts +++ b/src/utils/lang/__tests__/sets.spec.ts @@ -1,28 +1,13 @@ -import { __getSetConstructor, _Set, returnSetsUnion, SetPoly } from '../sets'; - -test('__getSetConstructor', () => { - - // should return global Set constructor if available - expect(__getSetConstructor()).toBe(global.Set); - - const originalSet = global.Set; // @ts-ignore - global.Set = undefined; // overwrite global Set - - // should return Set polyfill if global Set constructor is not available - expect(__getSetConstructor()).toBe(SetPoly); - - global.Set = originalSet; // restore original global Set - -}); +import { returnSetsUnion } from '../sets'; test('returnSetsUnion', () => { - const set = new _Set(['1','2','3']); - const set2 = new _Set(['4','5','6']); - expect(returnSetsUnion(set, set2)).toEqual(new _Set(['1','2','3','4','5','6'])); - expect(set).toEqual(new _Set(['1','2','3'])); - expect(set2).toEqual(new _Set(['4','5','6'])); + const set = new Set(['1', '2', '3']); + const set2 = new Set(['4', '5', '6']); + expect(returnSetsUnion(set, set2)).toEqual(new Set(['1', '2', '3', '4', '5', '6'])); + expect(set).toEqual(new Set(['1', '2', '3'])); + expect(set2).toEqual(new Set(['4', '5', '6'])); - const emptySet = new _Set([]); + const emptySet = new Set([]); expect(returnSetsUnion(emptySet, emptySet)).toEqual(emptySet); expect(returnSetsUnion(set, emptySet)).toEqual(set); expect(returnSetsUnion(emptySet, set2)).toEqual(set2); diff --git a/src/utils/lang/maps.ts b/src/utils/lang/maps.ts deleted file mode 100644 index 277712bd..00000000 --- a/src/utils/lang/maps.ts +++ /dev/null @@ -1,108 +0,0 @@ -/** - * Map implementation based on es6-map polyfill (https://github.com/medikoo/es6-map/blob/master/polyfill.js), - * with the minimal features used by the SDK. 
- -Copyright (C) 2013 Mariusz Nowak (www.medikoo.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -**/ - -export interface IMap { - clear(): void; - delete(key: K): boolean; - forEach(callbackfn: (value: V, key: K, map: Map) => void, thisArg?: any): void; - get(key: K): V | undefined; - has(key: K): boolean; - set(key: K, value: V): this; - readonly size: number; -} - -export class MapPoly implements IMap{ - private __mapKeysData__: K[] = []; - private __mapValuesData__: V[] = []; - - // unlike ES6 `Map`, it only accepts an array as first argument iterable - constructor(entries?: readonly (readonly [K, V])[] | null) { - if (Array.isArray(entries)) entries.forEach(entry => { this.set(entry[0], entry[1]); }); - } - - clear() { - if (!this.__mapKeysData__.length) return; - this.__mapKeysData__.length = 0; - this.__mapValuesData__.length = 0; - } - - delete(key: K) { - const index = this.__mapKeysData__.indexOf(key); - if (index === -1) return false; - this.__mapKeysData__.splice(index, 1); - this.__mapValuesData__.splice(index, 1); - return true; - } - - forEach(callbackfn: (value: V, key: K, map: Map) => void, thisArg?: any) { - for (let i = 0; i < this.__mapKeysData__.length; i++) { - callbackfn.call(thisArg, this.__mapValuesData__[i], this.__mapKeysData__[i], this as any); - } - } - - get(key: K) { - const index = this.__mapKeysData__.indexOf(key); - if (index === -1) return; - return this.__mapValuesData__[index]; - } - - has(key: K): boolean { - return this.__mapKeysData__.indexOf(key) !== -1; - } - - set(key: K, value: V) { - let index = this.__mapKeysData__.indexOf(key); - if (index === -1) index = this.__mapKeysData__.push(key) - 1; - this.__mapValuesData__[index] = value; - return this; - } - - get size() { - return this.__mapKeysData__.length; - } - -} - -interface IMapConstructor { - new(): IMap; - new (entries?: readonly (readonly [K, V])[] | null): IMap; - readonly prototype: IMap; -} - -/** - * return the Map constructor to use. If native Map is not available or it doesn't support the required features (e.g., IE11), - * a ponyfill with minimal features is returned instead. - * - * Exported for testing purposes only. 
- */ -export function __getMapConstructor(): IMapConstructor { - // eslint-disable-next-line compat/compat - if (typeof Array.from === 'function' && typeof Map === 'function' && Map.prototype && Map.prototype.values) { - return Map; - } - return MapPoly; -} - -export const _Map = __getMapConstructor(); diff --git a/src/utils/lang/sets.ts b/src/utils/lang/sets.ts index d8d63e7a..77c8caef 100644 --- a/src/utils/lang/sets.ts +++ b/src/utils/lang/sets.ts @@ -1,119 +1,5 @@ -/** - * Set implementation based on es6-set polyfill (https://github.com/medikoo/es6-set/blob/master/polyfill.js), - * with the minimal features used by the SDK. - -Copyright (C) 2013 Mariusz Nowak (www.medikoo.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -**/ - -export interface ISet { - add(value: T): this; - clear(): void; - delete(value: T): boolean; - forEach(callbackfn: (value: T, value2: T, set: ISet) => void, thisArg?: any): void; - has(value: T): boolean; - readonly size: number; -} - -export class SetPoly implements ISet{ - __setData__: T[] = []; - - // unlike ES6 `Set`, it only accepts an array as first argument iterable - constructor(values?: readonly T[] | null) { - if (Array.isArray(values)) values.forEach(value => { this.add(value); }); - } - - clear() { - if (!this.__setData__.length) return; - this.__setData__.length = 0; - } - - add(value: T) { - if (this.has(value)) return this; - this.__setData__.push(value); - return this; - } - - delete(value: T) { - let index = this.__setData__.indexOf(value); - if (index === -1) return false; - this.__setData__.splice(index, 1); - return true; - } - - has(value: T) { - return this.__setData__.indexOf(value) !== -1; - } - - forEach(callbackfn: (value: T, value2: T, set: SetPoly) => void, thisArg?: any): void { - if (typeof callbackfn !== 'function') throw new TypeError(callbackfn + ' is not a function'); - - for (let i = 0; i < this.__setData__.length; i++) { - const value = this.__setData__[i]; - callbackfn.call(thisArg, value, value, this); - } - } - - get size() { - return this.__setData__.length; - } - -} - - -/** - * return an array containing the items of the given set. 
- * @param set Set or SetPoly instance - */ -export function setToArray(set: ISet): T[] { - if (set instanceof SetPoly) { - return set.__setData__.slice(); - } - // if not using SetPoly as set, it means Array.from is supported - // eslint-disable-next-line compat/compat - return Array.from(set as Set); -} - -interface ISetConstructor { - new (values?: readonly T[] | null): ISet; - readonly prototype: ISet; -} - -/** - * return the Set constructor to use. If `Array.from` built-in or native Set is not available or it doesn't support the required features, - * a ponyfill with minimal features is returned instead. - * - * Exported for testing purposes only. - */ -export function __getSetConstructor(): ISetConstructor { - // eslint-disable-next-line compat/compat - if (typeof Array.from === 'function' && typeof Set === 'function' && Set.prototype && Set.prototype.values) { - return Set; - } - return SetPoly; -} - -export const _Set = __getSetConstructor(); - -export function returnSetsUnion(set: ISet, set2: ISet): ISet { - const result = new _Set(setToArray(set)); +export function returnSetsUnion(set: Set, set2: Set): Set { + const result = new Set(Array.from(set)); set2.forEach(value => { result.add(value); }); @@ -121,9 +7,9 @@ export function returnSetsUnion(set: ISet, set2: ISet): ISet { } export function returnDifference(list: T[] = [], list2: T[] = []): T[] { - const result = new _Set(list); + const result = new Set(list); list2.forEach(item => { result.delete(item); }); - return setToArray(result); + return Array.from(result); } diff --git a/src/utils/settingsValidation/logger/builtinLogger.ts b/src/utils/settingsValidation/logger/builtinLogger.ts index 5db9cfb0..abc4f56d 100644 --- a/src/utils/settingsValidation/logger/builtinLogger.ts +++ b/src/utils/settingsValidation/logger/builtinLogger.ts @@ -3,11 +3,10 @@ import { ILogger } from '../../../logger/types'; import { isLocalStorageAvailable } from '../../env/isLocalStorageAvailable'; import { isNode } from '../../env/isNode'; import { codesDebug } from '../../../logger/messages/debug'; -import { _Map } from '../../lang/maps'; import { getLogLevel } from './commons'; import { LogLevel } from '../../../types'; -const allCodes = new _Map(codesDebug); +const allCodes = new Map(codesDebug); // @TODO set default debug setting instead of initialLogLevel when integrating in JS and Node packages const LS_KEY = 'splitio_debug'; From 781e72a289186148d624b0963c0e2b41e393a879 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 7 Oct 2024 19:53:34 -0300 Subject: [PATCH 102/146] Simplify set utils and add CHANGELOG entry --- .eslintrc | 2 +- CHANGES.txt | 3 ++- src/utils/lang/__tests__/sets.spec.ts | 22 +++++++++++++++++----- src/utils/lang/sets.ts | 12 ++---------- 4 files changed, 22 insertions(+), 17 deletions(-) diff --git a/.eslintrc b/.eslintrc index 0442d7a9..c9af6d19 100644 --- a/.eslintrc +++ b/.eslintrc @@ -80,7 +80,7 @@ "message": "Don't declare const enum, because it is not supported by Babel used for building RN SDK" } ], - "compat/compat": ["error", "defaults, node 14"], + "compat/compat": ["error", "defaults, node >=14"], "no-throw-literal": "error", "import/no-default-export": "error", "import/no-self-import": "error" diff --git a/CHANGES.txt b/CHANGES.txt index 9ec1a0e7..02b24147 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -3,10 +3,11 @@ - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. 
- Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. - BREAKING CHANGES: - - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. - Updated default flag spec version to 1.2. - Removed `/mySegments` endpoint from SplitAPI module, as it is replaced by `/memberships` endpoint. - Removed support for MY_SEGMENTS_UPDATE and MY_SEGMENTS_UPDATE_V2 notification types, as they are replaced by MEMBERSHIPS_MS_UPDATE and MEMBERSHIPS_LS_UPDATE notification types. + - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. + - Removed internal ponyfills for `Map`, `Set` and `Array.from` global objects, dropping support for IE and other outdated browsers. The SDK now requires the runtime environment to support these features natively or to provide a polyfill. 1.17.0 (September 6, 2024) - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. diff --git a/src/utils/lang/__tests__/sets.spec.ts b/src/utils/lang/__tests__/sets.spec.ts index 4745e6b7..8be359eb 100644 --- a/src/utils/lang/__tests__/sets.spec.ts +++ b/src/utils/lang/__tests__/sets.spec.ts @@ -1,14 +1,26 @@ -import { returnSetsUnion } from '../sets'; +import { returnSetsUnion, returnDifference } from '../sets'; test('returnSetsUnion', () => { - const set = new Set(['1', '2', '3']); - const set2 = new Set(['4', '5', '6']); + const set = new Set(['1', '2', '3', '4']); + const set2 = new Set(['4', '5', '6', '1']); expect(returnSetsUnion(set, set2)).toEqual(new Set(['1', '2', '3', '4', '5', '6'])); - expect(set).toEqual(new Set(['1', '2', '3'])); - expect(set2).toEqual(new Set(['4', '5', '6'])); + expect(set).toEqual(new Set(['1', '2', '3', '4'])); + expect(set2).toEqual(new Set(['4', '5', '6', '1'])); const emptySet = new Set([]); expect(returnSetsUnion(emptySet, emptySet)).toEqual(emptySet); expect(returnSetsUnion(set, emptySet)).toEqual(set); expect(returnSetsUnion(emptySet, set2)).toEqual(set2); }); + +test('returnDifference', () => { + const list = ['1', '2', '3']; + const list2 = ['2', '3', '4']; + expect(returnDifference(list, list2)).toEqual(['1']); + expect(list).toEqual(['1', '2', '3']); + expect(list2).toEqual(['2', '3', '4']); + + expect(returnDifference([], [])).toEqual([]); + expect(returnDifference(list, [])).toEqual(list); + expect(returnDifference([], list2)).toEqual([]); +}); diff --git a/src/utils/lang/sets.ts b/src/utils/lang/sets.ts index 77c8caef..155c4587 100644 --- a/src/utils/lang/sets.ts +++ b/src/utils/lang/sets.ts @@ -1,15 +1,7 @@ export function returnSetsUnion(set: Set, set2: Set): Set { - const result = new Set(Array.from(set)); - set2.forEach(value => { - result.add(value); - }); - return result; + return new Set(Array.from(set).concat(Array.from(set2))); } export function returnDifference(list: T[] = [], list2: T[] = []): T[] { - const result = new Set(list); - list2.forEach(item => { - result.delete(item); - }); - return Array.from(result); + return list.filter(item => list2.indexOf(item) === -1); } From e1a49f5b478d258d0106c9725f115c703013d220 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 8 Oct 2024 11:39:17 -0300 Subject: [PATCH 103/146] Removed eslint-disable-next-line compat/compat --- src/listeners/browser.ts | 3 +-- src/sync/streaming/parseUtils.ts | 1 - src/utils/lang/index.ts | 13 ++++++------- 3 files changed, 7 insertions(+), 10 deletions(-) diff 
--git a/src/listeners/browser.ts b/src/listeners/browser.ts index 409241cc..2320879f 100644 --- a/src/listeners/browser.ts +++ b/src/listeners/browser.ts @@ -115,7 +115,6 @@ export class BrowserSignalListener implements ISignalListener { * Returns true if beacon API was used successfully, false otherwise. */ private _sendBeacon(url: string, data: any, extraMetadata?: {}) { - // eslint-disable-next-line compat/compat if (typeof navigator !== 'undefined' && navigator.sendBeacon) { const json = { entries: data, @@ -130,7 +129,7 @@ export class BrowserSignalListener implements ISignalListener { const payload = JSON.stringify(json); // https://xgwang.me/posts/you-may-not-know-beacon/#it-may-throw-error%2C-be-sure-to-catch - try { // eslint-disable-next-line compat/compat + try { return navigator.sendBeacon(url, payload); } catch (e) { return false; diff --git a/src/sync/streaming/parseUtils.ts b/src/sync/streaming/parseUtils.ts index 925b0524..2f2a0c49 100644 --- a/src/sync/streaming/parseUtils.ts +++ b/src/sync/streaming/parseUtils.ts @@ -13,7 +13,6 @@ function Uint8ArrayToString(myUint8Arr: Uint8Array) { // @ts-ignore function StringToUint8Array(myString: string) { const charCodes = myString.split('').map((e) => e.charCodeAt(0)); - // eslint-disable-next-line compat/compat return new Uint8Array(charCodes); } diff --git a/src/utils/lang/index.ts b/src/utils/lang/index.ts index 0a828dda..11b6afd0 100644 --- a/src/utils/lang/index.ts +++ b/src/utils/lang/index.ts @@ -122,10 +122,9 @@ export function isBoolean(val: any): boolean { */ export function isFiniteNumber(val: any): boolean { if (val instanceof Number) val = val.valueOf(); - // @TODO remove `isFinite` once `Number.isFinite` is fully supported by targets - // eslint-disable-next-line compat/compat - if (typeof val === 'number') return Number.isFinite ? Number.isFinite(val) : isFinite(val); - return false; + return typeof val === 'number' ? + Number.isFinite ? Number.isFinite(val) : isFinite(val) : + false; } /** @@ -134,9 +133,9 @@ export function isFiniteNumber(val: any): boolean { */ export function isIntegerNumber(val: any): boolean { if (val instanceof Number) val = val.valueOf(); - // eslint-disable-next-line compat/compat - if (typeof val === 'number') return Number.isInteger ? Number.isInteger(val) : isFinite(val) && Math.floor(val) === val; - return false; + return typeof val === 'number' ? + Number.isInteger ? Number.isInteger(val) : isFinite(val) && Math.floor(val) === val : + false; } /** From 4c979f747a7d9fd4032d94912013bb4b47d52341 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 8 Oct 2024 14:14:14 -0300 Subject: [PATCH 104/146] Removed the migration logic for the old format of MySegments keys in LocalStorage introduced in JavaScript SDK v10.17.3 --- CHANGES.txt | 1 + src/storages/KeyBuilderCS.ts | 13 ------- .../inLocalStorage/MySegmentsCacheInLocal.ts | 22 +----------- .../__tests__/MySegmentsCacheInLocal.spec.ts | 34 ------------------- 4 files changed, 2 insertions(+), 68 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 02b24147..36d509db 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -8,6 +8,7 @@ - Removed support for MY_SEGMENTS_UPDATE and MY_SEGMENTS_UPDATE_V2 notification types, as they are replaced by MEMBERSHIPS_MS_UPDATE and MEMBERSHIPS_LS_UPDATE notification types. - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. - Removed internal ponyfills for `Map`, `Set` and `Array.from` global objects, dropping support for IE and other outdated browsers. 
The SDK now requires the runtime environment to support these features natively or to provide a polyfill. + - Removed the migration logic for the old format of MySegments keys in LocalStorage introduced in JavaScript SDK v10.17.3. 1.17.0 (September 6, 2024) - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. diff --git a/src/storages/KeyBuilderCS.ts b/src/storages/KeyBuilderCS.ts index 0fe9c9bf..a59d7208 100644 --- a/src/storages/KeyBuilderCS.ts +++ b/src/storages/KeyBuilderCS.ts @@ -4,7 +4,6 @@ import { KeyBuilder } from './KeyBuilder'; export interface MySegmentsKeyBuilder { buildSegmentNameKey(segmentName: string): string; extractSegmentName(builtSegmentKeyName: string): string | undefined; - extractOldSegmentKey(builtSegmentKeyName: string): string | undefined; buildTillKey(): string; } @@ -33,14 +32,6 @@ export class KeyBuilderCS extends KeyBuilder implements MySegmentsKeyBuilder { return builtSegmentKeyName.substr(prefix.length); } - // @BREAKING: The key used to start with the matching key instead of the prefix, this was changed on version 10.17.3 - extractOldSegmentKey(builtSegmentKeyName: string) { - const prefix = `${this.matchingKey}.${this.prefix}.segment.`; - - if (startsWith(builtSegmentKeyName, prefix)) - return builtSegmentKeyName.substr(prefix.length); - } - buildLastUpdatedKey() { return `${this.prefix}.splits.lastUpdated`; } @@ -66,10 +57,6 @@ export function myLargeSegmentsKeyBuilder(prefix: string, matchingKey: string): if (startsWith(builtSegmentKeyName, p)) return builtSegmentKeyName.substr(p.length); }, - extractOldSegmentKey() { - return undefined; - }, - buildTillKey() { return `${prefix}.${matchingKey}.largeSegments.till`; } diff --git a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts index 7e01a906..f7b065e4 100644 --- a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts @@ -51,27 +51,7 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { return Object.keys(localStorage).reduce((accum, key) => { let segmentName = this.keys.extractSegmentName(key); - if (segmentName) { - accum.push(segmentName); - } else { - // @TODO @BREAKING: This is only to clean up "old" keys. Remove this whole else code block - segmentName = this.keys.extractOldSegmentKey(key); - - if (segmentName) { // this was an old segment key, let's clean up. - const newSegmentKey = this.keys.buildSegmentNameKey(segmentName); - try { - // If the new format key is not there, create it. - if (!localStorage.getItem(newSegmentKey)) { - localStorage.setItem(newSegmentKey, DEFINED); - // we are migrating a segment, let's track it. - accum.push(segmentName); - } - localStorage.removeItem(key); // we migrated the current key, let's delete it. 
- } catch (e) { - this.log.error(e); - } - } - } + if (segmentName) accum.push(segmentName); return accum; }, [] as string[]); diff --git a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts index aac52cac..22cd6633 100644 --- a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts @@ -34,37 +34,3 @@ test('SEGMENT CACHE / in LocalStorage', () => { expect(localStorage.getItem('SPLITIO.user.largeSegment.mocked-segment-2')).toBe('1'); expect(localStorage.getItem('SPLITIO.user.largeSegment.mocked-segment')).toBe(null); }); - -// @BREAKING: REMOVE when removing this backwards compatibility. -test('SEGMENT CACHE / in LocalStorage migration for mysegments keys', () => { - - const keys = new KeyBuilderCS('LS_BC_test.SPLITIO', 'test_nico'); - const cache = new MySegmentsCacheInLocal(loggerMock, keys); - - const oldKey1 = 'test_nico.LS_BC_test.SPLITIO.segment.segment1'; - const oldKey2 = 'test_nico.LS_BC_test.SPLITIO.segment.segment2'; - const newKey1 = keys.buildSegmentNameKey('segment1'); - const newKey2 = keys.buildSegmentNameKey('segment2'); - - cache.clear(); // cleanup before starting. - - // Not adding a full suite for LS keys now, testing here - expect(oldKey1).toBe(`test_nico.${keys.prefix}.segment.segment1`); - expect('segment1').toBe(keys.extractOldSegmentKey(oldKey1)); - - // add two segments, one we don't want to send on reset, should only be cleared, other one will be migrated. - localStorage.setItem(oldKey1, '1'); - localStorage.setItem(oldKey2, '1'); - expect(localStorage.getItem(newKey1)).toBe(null); // control assertion - - cache.resetSegments({ k: [{ n: 'segment1' }] }); - - expect(localStorage.getItem(newKey1)).toBe('1'); // The segment key for segment1, as is part of the new list, should be migrated. - expect(localStorage.getItem(newKey2)).toBe(null); // The segment key for segment2 should not be migrated. - expect(localStorage.getItem(oldKey1)).toBe(null); // Old keys are removed. - expect(localStorage.getItem(oldKey2)).toBe(null); // Old keys are removed. - - cache.clear(); - expect(cache.getRegisteredSegments()).toEqual([]); - expect(cache.getChangeNumber()).toBe(-1); -}); From 66210e60a185c5345cb92e53bc65df0e03320891 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 8 Oct 2024 15:23:25 -0300 Subject: [PATCH 105/146] Removed the function, which handled the logic to bound an optional traffic type to SDK clients --- CHANGES.txt | 1 + .../__tests__/sdkClientMethodCS.spec.ts | 69 +++++-------- src/sdkClient/clientCS.ts | 13 +-- src/sdkClient/sdkClientMethodCS.ts | 2 +- src/sdkClient/sdkClientMethodCSWithTT.ts | 98 ------------------- src/sdkFactory/types.ts | 2 +- src/types.ts | 10 +- .../__tests__/index.spec.ts | 21 ++-- .../__tests__/settings.mocks.ts | 11 --- src/utils/settingsValidation/index.ts | 10 -- src/utils/settingsValidation/types.ts | 2 - 11 files changed, 38 insertions(+), 201 deletions(-) delete mode 100644 src/sdkClient/sdkClientMethodCSWithTT.ts diff --git a/CHANGES.txt b/CHANGES.txt index 36d509db..a78d749a 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -9,6 +9,7 @@ - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. - Removed internal ponyfills for `Map`, `Set` and `Array.from` global objects, dropping support for IE and other outdated browsers. 
The SDK now requires the runtime environment to support these features natively or to provide a polyfill. - Removed the migration logic for the old format of MySegments keys in LocalStorage introduced in JavaScript SDK v10.17.3. + - Removed the `sdkClientMethodCSWithTTFactory` function, which handled the logic to bound an optional traffic type to SDK clients. Client-side SDK implementations must use `sdkClientMethodCSWithTT` which, unlike the previous function, does not allow passing a traffic type but simplifies the SDK API. 1.17.0 (September 6, 2024) - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. diff --git a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts index 1abb2a93..ad8d689a 100644 --- a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts @@ -1,8 +1,7 @@ -import { sdkClientMethodCSFactory as sdkClientMethodCSWithTTFactory } from '../sdkClientMethodCSWithTT'; import { sdkClientMethodCSFactory } from '../sdkClientMethodCS'; import { assertClientApi } from './testUtils'; import { telemetryTrackerFactory } from '../../trackers/telemetryTracker'; -import { settingsWithKey, settingsWithKeyAndTT, settingsWithKeyObject } from '../../utils/settingsValidation/__tests__/settings.mocks'; +import { settingsWithKey, settingsWithKeyObject } from '../../utils/settingsValidation/__tests__/settings.mocks'; const partialStorages: { destroy: jest.Mock }[] = []; @@ -75,13 +74,7 @@ describe('sdkClientMethodCSFactory', () => { params.clients = {}; }); - // list of factory functions and their types (whether it ignores TT or not) - const testTargets = [ - [sdkClientMethodCSWithTTFactory, false], - [sdkClientMethodCSFactory, true] - ]; - - test.each(testTargets)('main client', (sdkClientMethodCSFactory) => { + test('main client', () => { // @ts-expect-error const sdkClientMethod = sdkClientMethodCSFactory(params); @@ -106,21 +99,20 @@ describe('sdkClientMethodCSFactory', () => { }); - test.each(testTargets)('multiple clients', async (sdkClientMethodCSFactory, ignoresTT) => { + test('multiple clients', async () => { // @ts-expect-error const sdkClientMethod = sdkClientMethodCSFactory(params); - // calling the function with a diferent key than settings, should return a new client instance + // calling the function with a different key than settings, should return a new client instance const newClients = new Set([ - sdkClientMethod('other-key'), // new client - sdkClientMethod('other-key', 'other-tt'), // new client - sdkClientMethod({ matchingKey: 'other-key', bucketingKey: 'buck' }) // new client + sdkClientMethod('other-key'), // @ts-expect-error + sdkClientMethod('other-key', 'ignored-tt'), + sdkClientMethod({ matchingKey: 'other-key', bucketingKey: 'buck' }) ]); - if (ignoresTT) expect(newClients.size).toBe(2); - else expect(newClients.size).toBe(3); + expect(newClients.size).toBe(2); - // each new client must follog the Client API + // each new client must follow the Client API newClients.forEach(newClient => { assertClientApi(newClient); expect(newClient).not.toBe(sdkClientMethod()); @@ -150,7 +142,7 @@ describe('sdkClientMethodCSFactory', () => { }); - test.each(testTargets)('return main client instance if called with same key', (sdkClientMethodCSFactory) => { + test('returns main client instance if called with same key', () => { params.settings = settingsWithKey; // @ts-expect-error @@ -163,20 +155,7 @@ 
describe('sdkClientMethodCSFactory', () => { expect(params.syncManager.shared).not.toBeCalled(); }); - test.each(testTargets)('return main client instance if called with same key and TT', (sdkClientMethodCSFactory) => { - - params.settings = settingsWithKeyAndTT; - // @ts-expect-error - const sdkClientMethod = sdkClientMethodCSFactory(params); - - expect(sdkClientMethod()).toBe(sdkClientMethod(settingsWithKeyAndTT.core.key, settingsWithKeyAndTT.core.trafficType)); - - expect(params.storage.shared).not.toBeCalled(); - expect(params.sdkReadinessManager.shared).not.toBeCalled(); - expect(params.syncManager.shared).not.toBeCalled(); - }); - - test.each(testTargets)('return main client instance if called with same key object', (sdkClientMethodCSFactory) => { + test('returns main client instance if called with same key object', () => { // @ts-expect-error params.settings = settingsWithKeyObject; // @ts-expect-error @@ -189,39 +168,37 @@ describe('sdkClientMethodCSFactory', () => { expect(params.syncManager.shared).not.toBeCalled(); }); - test.each(testTargets)('return same client instance if called with same key or traffic type (input validation)', (sdkClientMethodCSFactory, ignoresTT) => { + test('returns same client instance if called with same key (input validation)', () => { // @ts-expect-error const sdkClientMethod = sdkClientMethodCSFactory(params); - const clientInstance = sdkClientMethod('key', 'tt'); + const clientInstance = sdkClientMethod('key'); - expect(sdkClientMethod('key', 'tT')).toBe(clientInstance); // No new client created: TT is lowercased / ignored - expect(sdkClientMethod(' key ', 'tt')).toBe(clientInstance); // No new client created: key is trimmed - expect(sdkClientMethod({ matchingKey: 'key ', bucketingKey: ' key' }, 'TT')).toBe(clientInstance); // No new client created: key object is equivalent to 'key' string + expect(sdkClientMethod('key')).toBe(clientInstance); // No new client created: same key + expect(sdkClientMethod(' key ')).toBe(clientInstance); // No new client created: key is trimmed + expect(sdkClientMethod({ matchingKey: 'key ', bucketingKey: ' key' })).toBe(clientInstance); // No new client created: key object is equivalent to 'key' string expect(params.storage.shared).toBeCalledTimes(1); expect(params.sdkReadinessManager.shared).toBeCalledTimes(1); expect(params.syncManager.shared).toBeCalledTimes(1); - expect(sdkClientMethod('KEY', 'tt')).not.toBe(clientInstance); // New client created: key is case-sensitive - if (!ignoresTT) expect(sdkClientMethod('key', 'TT ')).not.toBe(clientInstance); // New client created: TT is not trimmed + expect(sdkClientMethod('KEY')).not.toBe(clientInstance); // New client created: key is case-sensitive - const clientCount = ignoresTT ? 
2 : 3; + const clientCount = 2; expect(params.storage.shared).toBeCalledTimes(clientCount); expect(params.sdkReadinessManager.shared).toBeCalledTimes(clientCount); expect(params.syncManager.shared).toBeCalledTimes(clientCount); }); - test.each(testTargets)('invalid calls throw an error', (sdkClientMethodCSFactory, ignoresTT) => { + test('invalid calls throw an error', () => { // @ts-expect-error - const sdkClientMethod = sdkClientMethodCSFactory(params); + const sdkClientMethod = sdkClientMethodCSFactory(params); // @ts-expect-error expect(() => sdkClientMethod({ matchingKey: settingsWithKey.core.key, bucketingKey: undefined })).toThrow('Shared Client needs a valid key.'); - if (!ignoresTT) expect(() => sdkClientMethod('valid-key', ['invalid-TT'])).toThrow('Shared Client needs a valid traffic type or no traffic type at all.'); }); - test.each(testTargets)('attributes binding - main client', (sdkClientMethodCSFactory) => { + test('attributes binding - main client', () => { // @ts-expect-error - const sdkClientMethod = sdkClientMethodCSFactory(params); + const sdkClientMethod = sdkClientMethodCSFactory(params) as any; // should return a function expect(typeof sdkClientMethod).toBe('function'); @@ -273,7 +250,7 @@ describe('sdkClientMethodCSFactory', () => { }); - test.each(testTargets)('attributes binding - shared clients', (sdkClientMethodCSFactory) => { + test('attributes binding - shared clients', () => { // @ts-expect-error const sdkClientMethod = sdkClientMethodCSFactory(params); diff --git a/src/sdkClient/clientCS.ts b/src/sdkClient/clientCS.ts index 29fe99d8..a4a63edb 100644 --- a/src/sdkClient/clientCS.ts +++ b/src/sdkClient/clientCS.ts @@ -5,18 +5,17 @@ import { clientAttributesDecoration } from './clientAttributesDecoration'; /** - * Decorator that binds a key and (optionally) a traffic type to client methods + * Decorator that binds a key to client methods * * @param client sync client instance * @param key validated split key - * @param trafficType validated traffic type */ -export function clientCSDecorator(log: ILogger, client: SplitIO.IClient, key: SplitIO.SplitKey, trafficType?: string): SplitIO.ICsClient { +export function clientCSDecorator(log: ILogger, client: SplitIO.IClient, key: SplitIO.SplitKey): SplitIO.ICsClient { let clientCS = clientAttributesDecoration(log, client); return objectAssign(clientCS, { - // In the client-side API, we bind a key to the client `getTreatment*` methods + // In the client-side API, we bind a key to the client `getTreatment*` and `track` methods getTreatment: clientCS.getTreatment.bind(clientCS, key), getTreatmentWithConfig: clientCS.getTreatmentWithConfig.bind(clientCS, key), getTreatments: clientCS.getTreatments.bind(clientCS, key), @@ -26,12 +25,10 @@ export function clientCSDecorator(log: ILogger, client: SplitIO.IClient, key: Sp getTreatmentsByFlagSet: clientCS.getTreatmentsByFlagSet.bind(clientCS, key), getTreatmentsWithConfigByFlagSet: clientCS.getTreatmentsWithConfigByFlagSet.bind(clientCS, key), - // Key is bound to the `track` method. Same thing happens with trafficType but only if provided - track: trafficType ? clientCS.track.bind(clientCS, key, trafficType) : clientCS.track.bind(clientCS, key), + track: clientCS.track.bind(clientCS, key), // Not part of the public API. 
These properties are used to support other modules (e.g., Split Suite) isClientSide: true, - key, - trafficType + key }) as SplitIO.ICsClient; } diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 87380d7b..efba752d 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -35,7 +35,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl return mainClientInstance; } - // Validate the key value. The trafficType (2nd argument) is ignored + // Validate the key value const validKey = validateKey(log, key, LOG_PREFIX_CLIENT_INSTANTIATION); if (validKey === false) { throw new Error('Shared Client needs a valid key.'); diff --git a/src/sdkClient/sdkClientMethodCSWithTT.ts b/src/sdkClient/sdkClientMethodCSWithTT.ts deleted file mode 100644 index 9f577471..00000000 --- a/src/sdkClient/sdkClientMethodCSWithTT.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { clientCSDecorator } from './clientCS'; -import { SplitIO } from '../types'; -import { validateKey } from '../utils/inputValidation/key'; -import { validateTrafficType } from '../utils/inputValidation/trafficType'; -import { getMatching, keyParser } from '../utils/key'; -import { sdkClientFactory } from './sdkClient'; -import { ISyncManagerCS } from '../sync/types'; -import { objectAssign } from '../utils/lang/objectAssign'; -import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, LOG_PREFIX_CLIENT_INSTANTIATION } from '../logger/constants'; -import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; -import { ISdkFactoryContext } from '../sdkFactory/types'; -import { buildInstanceId } from './identity'; - -/** - * Factory of client method for the client-side (browser) variant of the Isomorphic JS SDK, - * where clients can have a bound TT for the track method, which is provided via the settings - * (default client) or the client method (shared clients). - */ -export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey, trafficType?: string) => SplitIO.ICsClient { - const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, log } } = params; - - const mainClientInstance = clientCSDecorator( - log, - sdkClientFactory(params) as SplitIO.IClient, - key, - trafficType - ); - - const parsedDefaultKey = keyParser(key); - const defaultInstanceId = buildInstanceId(parsedDefaultKey, trafficType); - - // Cache instances created per factory. 
- clients[defaultInstanceId] = mainClientInstance; - - return function client(key?: SplitIO.SplitKey, trafficType?: string) { - if (key === undefined) { - log.debug(RETRIEVE_CLIENT_DEFAULT); - return mainClientInstance; - } - - // Validate the key value - const validKey = validateKey(log, key, LOG_PREFIX_CLIENT_INSTANTIATION); - if (validKey === false) { - throw new Error('Shared Client needs a valid key.'); - } - - let validTrafficType; - if (trafficType !== undefined) { - validTrafficType = validateTrafficType(log, trafficType, LOG_PREFIX_CLIENT_INSTANTIATION); - if (validTrafficType === false) { - throw new Error('Shared Client needs a valid traffic type or no traffic type at all.'); - } - } - const instanceId = buildInstanceId(validKey, validTrafficType); - - if (!clients[instanceId]) { - const matchingKey = getMatching(validKey); - - const sharedSdkReadiness = sdkReadinessManager.shared(); - const sharedStorage = storage.shared && storage.shared(matchingKey, (err) => { - if (err) { - sharedSdkReadiness.readinessManager.timeout(); - return; - } - // Emit SDK_READY in consumer mode for shared clients - sharedSdkReadiness.readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - }); - - // 3 possibilities: - // - Standalone mode: both syncManager and sharedSyncManager are defined - // - Consumer mode: both syncManager and sharedSyncManager are undefined - // - Consumer partial mode: syncManager is defined (only for submitters) but sharedSyncManager is undefined - // @ts-ignore - const sharedSyncManager = syncManager && sharedStorage && (syncManager as ISyncManagerCS).shared(matchingKey, sharedSdkReadiness.readinessManager, sharedStorage); - - // As shared clients reuse all the storage information, we don't need to check here if we - // will use offline or online mode. We should stick with the original decision. - clients[instanceId] = clientCSDecorator( - log, - sdkClientFactory(objectAssign({}, params, { - sdkReadinessManager: sharedSdkReadiness, - storage: sharedStorage || storage, - syncManager: sharedSyncManager, - }), true) as SplitIO.IClient, - validKey, - validTrafficType - ); - - sharedSyncManager && sharedSyncManager.start(); - - log.info(NEW_SHARED_CLIENT); - } else { - log.debug(RETRIEVE_CLIENT_EXISTING); - } - - return clients[instanceId] as SplitIO.ICsClient; - }; -} diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts index b0a3b3f2..7956a80a 100644 --- a/src/sdkFactory/types.ts +++ b/src/sdkFactory/types.ts @@ -93,7 +93,7 @@ export interface ISdkFactoryParams { // Sdk client method factory (ISDK::client method). // It Allows to distinguish SDK clients with the client-side API (`ICsSDK`) or server-side API (`ISDK` or `IAsyncSDK`). - sdkClientMethodFactory: (params: ISdkFactoryContext) => ({ (): SplitIO.ICsClient; (key: SplitIO.SplitKey, trafficType?: string | undefined): SplitIO.ICsClient; } | (() => SplitIO.IClient) | (() => SplitIO.IAsyncClient)) + sdkClientMethodFactory: (params: ISdkFactoryContext) => ({ (): SplitIO.ICsClient; (key: SplitIO.SplitKey): SplitIO.ICsClient; } | (() => SplitIO.IClient) | (() => SplitIO.IAsyncClient)) // Impression observer factory. 
impressionsObserverFactory: () => IImpressionObserver diff --git a/src/types.ts b/src/types.ts index 2a65b297..3647b804 100644 --- a/src/types.ts +++ b/src/types.ts @@ -71,7 +71,6 @@ export interface ISettings { readonly core: { authorizationKey: string, key: SplitIO.SplitKey, - trafficType?: string, labelsEnabled: boolean, IPAddressesEnabled: boolean }, @@ -923,12 +922,6 @@ export namespace SplitIO { * @property {SplitKey} key */ key: SplitKey, - /** - * Traffic type associated with the customer identifier. @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * If no provided as a setting it will be required on the client.track() calls. - * @property {string} trafficType - */ - trafficType?: string, /** * Disable labels from being sent to Split backend. Labels may contain sensitive information. * @property {boolean} labelsEnabled @@ -1037,10 +1030,9 @@ export namespace SplitIO { * Returns a shared client of the SDK, with the given key and optional traffic type. * @function client * @param {SplitKey} key The key for the new client instance. - * @param {string=} trafficType The traffic type of the provided key. * @returns {ICsClient} The client instance. */ - client(key: SplitKey, trafficType?: string): ICsClient, + client(key: SplitKey): ICsClient, /** * Returns a manager instance of the SDK to explore available information. * @function manager diff --git a/src/utils/settingsValidation/__tests__/index.spec.ts b/src/utils/settingsValidation/__tests__/index.spec.ts index 1deffc98..a230fdf3 100644 --- a/src/utils/settingsValidation/__tests__/index.spec.ts +++ b/src/utils/settingsValidation/__tests__/index.spec.ts @@ -233,40 +233,32 @@ describe('settingsValidation', () => { expect(settings.core.key).toBe(undefined); }); - test('validates and sanitizes key and traffic type in client-side', () => { - const clientSideValidationParams = { ...minimalSettingsParams, acceptKey: true, acceptTT: true }; + test('validates and sanitizes key in client-side', () => { + const clientSideValidationParams = { ...minimalSettingsParams, acceptKey: true }; const samples = [{ key: ' valid-key ', settingsKey: 'valid-key', // key string is trimmed - trafficType: 'VALID-TT', settingsTrafficType: 'valid-tt', // TT is converted to lowercase }, { key: undefined, settingsKey: false, // undefined key is not valid in client-side - trafficType: undefined, settingsTrafficType: undefined, }, { - key: null, settingsKey: false, - trafficType: null, settingsTrafficType: false, + key: {}, settingsKey: false, }, { key: true, settingsKey: false, - trafficType: true, settingsTrafficType: false, }, { key: 1.5, settingsKey: '1.5', // finite number as key is parsed - trafficType: 100, settingsTrafficType: false, }, { key: { matchingKey: 100, bucketingKey: ' BUCK ' }, settingsKey: { matchingKey: '100', bucketingKey: 'BUCK' }, - trafficType: {}, settingsTrafficType: false, }]; - samples.forEach(({ key, trafficType, settingsKey, settingsTrafficType }) => { + samples.forEach(({ key, settingsKey }) => { const settings = settingsValidation({ core: { authorizationKey: 'dummy token', - key, - trafficType + key } }, clientSideValidationParams); expect(settings.core.key).toEqual(settingsKey); - expect(settings.core.trafficType).toEqual(settingsTrafficType); }); }); @@ -275,12 +267,11 @@ describe('settingsValidation', () => { core: { authorizationKey: 'dummy token', key: true, - trafficType: true + trafficType: 'ignored' } }, { ...minimalSettingsParams, acceptKey: true }); expect(settings.core.key).toEqual(false); // 
key is validated - expect(settings.core.trafficType).toEqual(true); // traffic type is ignored }); // Not implemented yet diff --git a/src/utils/settingsValidation/__tests__/settings.mocks.ts b/src/utils/settingsValidation/__tests__/settings.mocks.ts index d5f4b2e5..d4e29e51 100644 --- a/src/utils/settingsValidation/__tests__/settings.mocks.ts +++ b/src/utils/settingsValidation/__tests__/settings.mocks.ts @@ -13,17 +13,6 @@ export const settingsWithKey = { log: loggerMock }; -export const settingsWithKeyAndTT = { - core: { - key: 'some_key', - trafficType: 'some_tt' - }, - startup: { - readyTimeout: 1, - }, - log: loggerMock -}; - export const settingsWithKeyObject = { core: { key: { diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index c6641c9a..6438f512 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ -6,7 +6,6 @@ import { validImpressionsMode } from './impressionsMode'; import { ISettingsValidationParams } from './types'; import { ISettings } from '../../types'; import { validateKey } from '../inputValidation/key'; -import { validateTrafficType } from '../inputValidation/trafficType'; import { ERROR_MIN_CONFIG_PARAM, LOG_PREFIX_CLIENT_INSTANTIATION } from '../../logger/constants'; // Exported for telemetry @@ -19,8 +18,6 @@ export const base = { authorizationKey: undefined, // key used in your system (only required for browser version) key: undefined, - // traffic type for the given key (only used on browser version) - trafficType: undefined, // toggle impressions tracking of labels labelsEnabled: true, // toggle sendind (true) or not sending (false) IP and Host Name with impressions, events, and telemetries requests (only used on nodejs version) @@ -168,13 +165,6 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV // @ts-ignore, @TODO handle invalid keys as a non-recoverable error? withDefaults.core.key = validateKey(log, maybeKey, LOG_PREFIX_CLIENT_INSTANTIATION); } - - if (validationParams.acceptTT) { - const maybeTT = withDefaults.core.trafficType; - if (maybeTT !== undefined) { // @ts-ignore - withDefaults.core.trafficType = validateTrafficType(log, maybeTT, LOG_PREFIX_CLIENT_INSTANTIATION); - } - } } else { // On server-side, key is undefined and used to distinguish from client-side if (maybeKey !== undefined) log.warn('Provided `key` is ignored in server-side SDK.'); // @ts-ignore diff --git a/src/utils/settingsValidation/types.ts b/src/utils/settingsValidation/types.ts index dcc0d8f8..32145836 100644 --- a/src/utils/settingsValidation/types.ts +++ b/src/utils/settingsValidation/types.ts @@ -12,8 +12,6 @@ export interface ISettingsValidationParams { defaults: Partial & { version: string } & { startup: ISettings['startup'] }, /** If true, validates core.key */ acceptKey?: boolean, - /** If true, validates core.trafficType */ - acceptTT?: boolean, /** Define runtime values (`settings.runtime`) */ runtime: (settings: ISettings) => ISettings['runtime'], /** Storage validator (`settings.storage`) */ From 9af5a941cd5a672723bd0cc186edb2700aeceef4 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 8 Oct 2024 15:48:59 -0300 Subject: [PATCH 106/146] Update changelog entry --- CHANGES.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index a78d749a..8bb7bec6 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -9,7 +9,7 @@ - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. 
 - Removed internal ponyfills for `Map`, `Set` and `Array.from` global objects, dropping support for IE and other outdated browsers. The SDK now requires the runtime environment to support these features natively or to provide a polyfill.
   - Removed the migration logic for the old format of MySegments keys in LocalStorage introduced in JavaScript SDK v10.17.3.
-  - Removed the `sdkClientMethodCSWithTTFactory` function, which handled the logic to bound an optional traffic type to SDK clients. Client-side SDK implementations must use `sdkClientMethodCSWithTT` which, unlike the previous function, does not allow passing a traffic type but simplifies the SDK API.
+  - Removed the `sdkClientMethodCSWithTT` function, which handled the logic to bind an optional traffic type to SDK clients. Client-side SDK implementations must use the `sdkClientMethodCS` module, which, unlike the previous function, does not allow passing a traffic type but simplifies the SDK API.
 
 1.17.0 (September 6, 2024)
  - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks.

From 464102fe9b98e20926ea49e43967ad5cb2d63acd Mon Sep 17 00:00:00 2001
From: Emiliano Sanchez
Date: Tue, 8 Oct 2024 17:45:59 -0300
Subject: [PATCH 107/146] rc

---
 package-lock.json | 4 ++--
 package.json      | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 2493ec38..9a90a643 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@splitsoftware/splitio-commons",
-  "version": "1.17.1-rc.1",
+  "version": "2.0.0-rc.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@splitsoftware/splitio-commons",
-      "version": "1.17.1-rc.1",
+      "version": "2.0.0-rc.0",
       "license": "Apache-2.0",
       "dependencies": {
         "tslib": "^2.3.1"
diff --git a/package.json b/package.json
index 1c1a2eb2..f5818af3 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@splitsoftware/splitio-commons",
-  "version": "1.17.1-rc.1",
+  "version": "2.0.0-rc.0",
   "description": "Split JavaScript SDK common components",
   "main": "cjs/index.js",
   "module": "esm/index.js",

From 131ee7aa202746563ebc199d7d124dbace31d645 Mon Sep 17 00:00:00 2001
From: Emiliano Sanchez
Date: Wed, 9 Oct 2024 12:13:20 -0300
Subject: [PATCH 108/146] revert data_loader_for_ssr

---
 CHANGES.txt                                |  2 -
 .../__tests__/sdkClientMethodCS.spec.ts     |  1 -
 src/sdkFactory/index.ts                     |  4 +-
 src/sdkFactory/types.ts                     |  1 -
 src/storages/__tests__/dataLoader.spec.ts   | 31 ------
 src/storages/dataLoader.ts                  | 94 +++++++------------
 src/storages/inMemory/InMemoryStorageCS.ts  | 23 +----
 src/storages/types.ts                       |  2 +
 src/trackers/eventTracker.ts                |  2 +-
 src/trackers/impressionsTracker.ts          |  2 +-
 src/types.ts                                | 17 ++--
 11 files changed, 50 insertions(+), 129 deletions(-)
 delete mode 100644 src/storages/__tests__/dataLoader.spec.ts

diff --git a/CHANGES.txt b/CHANGES.txt
index 0ab19c90..4c333159 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,8 +1,6 @@
 2.0.0 (October XX, 2024)
 - Added support for targeting rules based on large segments.
 - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory.
- - Added `factory.getState()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage.
- - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan.
- Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow. - Updated the handling of timers and async operations by moving them into an `init` factory method to enable lazy initialization of the SDK. This update is intended for the React SDK. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. diff --git a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts index 1ac052aa..793b12f8 100644 --- a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts @@ -47,7 +47,6 @@ const params = { settings: settingsWithKey, telemetryTracker: telemetryTrackerFactory(), clients: {}, - whenInit: (cb: () => void) => cb() }; const invalidAttributes = [ diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 0951abd3..41706cc6 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -56,7 +56,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); } }); - + // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` const clients: Record = {}; const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now); const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker }); @@ -82,7 +82,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. // splitApi is used by SyncManager and Browser signal listener const splitApi = splitApiFactory && splitApiFactory(settings, platform, telemetryTracker); - const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform, whenInit }; + const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform }; const syncManager = syncManagerFactory && syncManagerFactory(ctx as ISdkFactoryContextSync); ctx.syncManager = syncManager; diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts index 774110c5..d52ce348 100644 --- a/src/sdkFactory/types.ts +++ b/src/sdkFactory/types.ts @@ -50,7 +50,6 @@ export interface ISdkFactoryContext { splitApi?: ISplitApi syncManager?: ISyncManager, clients: Record, - whenInit(cb: () => void): void } export interface ISdkFactoryContextSync extends ISdkFactoryContext { diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts deleted file mode 100644 index c9f77849..00000000 --- a/src/storages/__tests__/dataLoader.spec.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; -import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; -import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; - -import * as dataLoader from '../dataLoader'; - -test('loadData & getSnapshot', () => { - jest.spyOn(dataLoader, 'loadData'); - const onReadyFromCacheCb = jest.fn(); - // @ts-expect-error - const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); - serverStorage.splits.setChangeNumber(123); // @ts-expect-error - serverStorage.splits.addSplits([['split1', { name: 'split1' }]]); - 
serverStorage.segments.addToSegment('segment1', [fullSettings.core.key as string]); - - const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); - - // @ts-expect-error - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb }); - - // Assert - expect(dataLoader.loadData).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); - expect(dataLoader.getSnapshot(clientStorage, [fullSettings.core.key as string])).toEqual(preloadedData); - expect(preloadedData).toEqual({ - since: 123, - splitsData: [{ name: 'split1' }], - mySegmentsData: { [fullSettings.core.key as string]: ['segment1'] }, - segmentsData: undefined - }); -}); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 7b44df91..24898d68 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,85 +1,55 @@ import { SplitIO } from '../types'; -import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; -import { setToArray, ISet } from '../utils/lang/sets'; -import { getMatching } from '../utils/key'; +import { DEFAULT_CACHE_EXPIRATION_IN_MILLIS } from '../utils/constants/browser'; +import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; /** - * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function - * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) + * Factory of client-side storage loader * - * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader and extended with a `mySegmentsData` property. - * @param storage object containing `splits` and `segments` cache (client-side variant) - * @param userKey user key (matching key) of the provided MySegmentsCache - * - * @TODO extend to load largeSegments - * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. - * @TODO add logs, and input validation in this module, in favor of size reduction. - * @TODO unit tests + * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader + * and extended with a `mySegmentsData` property. + * @returns function to preload the storage */ -export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { - // Do not load data if current preloadedData is empty - if (Object.keys(preloadedData).length === 0) return; - - const { segmentsData = {}, since = -1, splitsData = [] } = preloadedData; +export function dataLoaderFactory(preloadedData: SplitIO.PreloadedData): DataLoader { + + /** + * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function + * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) + * + * @param storage object containing `splits` and `segments` cache (client-side variant) + * @param userId user key string of the provided MySegmentsCache + * + * @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. + * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. 
+ */ + return function loadData(storage: { splits: ISplitsCacheSync, segments: ISegmentsCacheSync }, userId: string) { + // Do not load data if current preloadedData is empty + if (Object.keys(preloadedData).length === 0) return; + + const { lastUpdated = -1, segmentsData = {}, since = -1, splitsData = {} } = preloadedData; - if (storage.splits) { const storedSince = storage.splits.getChangeNumber(); + const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; - // Do not load data if current data is more recent - if (storedSince > since) return; + // Do not load data if current localStorage data is more recent, + // or if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, + if (storedSince > since || lastUpdated < expirationTimestamp) return; // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data storage.splits.clear(); storage.splits.setChangeNumber(since); // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.splits.addSplits(splitsData.map(split => ([split.name, split]))); - } + storage.splits.addSplits(Object.keys(splitsData).map(splitName => JSON.parse(splitsData[splitName]))); - if (matchingKey) { // add mySegments data (client-side) - let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[matchingKey]; + // add mySegments data + let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[userId]; if (!mySegmentsData) { // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds mySegmentsData = Object.keys(segmentsData).filter(segmentName => { - const matchingKeys = segmentsData[segmentName]; - return matchingKeys.indexOf(matchingKey) > -1; + const userIds = JSON.parse(segmentsData[segmentName]).added; + return Array.isArray(userIds) && userIds.indexOf(userId) > -1; }); } storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); - } else { // add segments data (server-side) - Object.keys(segmentsData).filter(segmentName => { - const matchingKeys = segmentsData[segmentName]; - storage.segments.addToSegment(segmentName, matchingKeys); - }); - } -} - -export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { - return { - // lastUpdated: Date.now(), - since: storage.splits.getChangeNumber(), - splitsData: storage.splits.getAll(), - segmentsData: userKeys ? - undefined : // @ts-ignore accessing private prop - Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop - prev[cur] = setToArray(storage.segments.segmentCache[cur] as ISet); - return prev; - }, {}), - mySegmentsData: userKeys ? - userKeys.reduce>((prev, userKey) => { - prev[getMatching(userKey)] = storage.shared ? - // Client-side segments - // @ts-ignore accessing private prop - Object.keys(storage.shared(userKey).segments.segmentCache) : - // Server-side segments - // @ts-ignore accessing private prop - Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop - return storage.segments.segmentCache[segmentName].has(userKey) ? 
- prev.concat(segmentName) : - prev; - }, []); - return prev; - }, {}) : - undefined }; } diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 670b91f1..30667369 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -7,8 +7,6 @@ import { ImpressionCountsCacheInMemory } from './ImpressionCountsCacheInMemory'; import { DEBUG, LOCALHOST_MODE, NONE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; -import { getMatching } from '../../utils/key'; -import { loadData } from '../dataLoader'; /** * InMemory storage factory for standalone client-side SplitFactory @@ -16,7 +14,7 @@ import { loadData } from '../dataLoader'; * @param params parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; + const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; const splits = new SplitsCacheInMemory(__splitFiltersValidation); const segments = new MySegmentsCacheInMemory(); @@ -44,18 +42,11 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag }, // When using shared instanciation with MEMORY we reuse everything but segments (they are unique per key) - shared(matchingKey: string) { - const segments = new MySegmentsCacheInMemory(); - const largeSegments = new MySegmentsCacheInMemory(); - - if (preloadedData) { - loadData(preloadedData, { segments, largeSegments }, matchingKey); - } - + shared() { return { splits: this.splits, - segments, - largeSegments, + segments: new MySegmentsCacheInMemory(), + largeSegments: new MySegmentsCacheInMemory(), impressions: this.impressions, impressionCounts: this.impressionCounts, events: this.events, @@ -81,12 +72,6 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag if (storage.uniqueKeys) storage.uniqueKeys.track = noopTrack; } - - if (preloadedData) { - loadData(preloadedData, storage, getMatching(params.settings.core.key)); - if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); - } - return storage; } diff --git a/src/storages/types.ts b/src/storages/types.ts index 21945587..61ab10f2 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -492,6 +492,8 @@ export interface IStorageAsync extends IStorageBase< /** StorageFactory */ +export type DataLoader = (storage: IStorageSync, matchingKey: string) => void + export interface IStorageFactoryParams { settings: ISettings, /** diff --git a/src/trackers/eventTracker.ts b/src/trackers/eventTracker.ts index 18b1e94c..8efcf413 100644 --- a/src/trackers/eventTracker.ts +++ b/src/trackers/eventTracker.ts @@ -32,8 +32,8 @@ export function eventTrackerFactory( if (tracked) { log.info(EVENTS_TRACKER_SUCCESS, [msg]); if (integrationsManager) { - // Wrap in a timeout because we don't want it to be blocking. whenInit(() => { + // Wrap in a timeout because we don't want it to be blocking. 
setTimeout(() => { // copy of event, to avoid unexpected behaviour if modified by integrations const eventDataCopy = objectAssign({}, eventData); diff --git a/src/trackers/impressionsTracker.ts b/src/trackers/impressionsTracker.ts index d8a3fbc0..dcf998fc 100644 --- a/src/trackers/impressionsTracker.ts +++ b/src/trackers/impressionsTracker.ts @@ -67,8 +67,8 @@ export function impressionsTrackerFactory( sdkLanguageVersion: version }; - // Wrap in a timeout because we don't want it to be blocking. whenInit(() => { + // Wrap in a timeout because we don't want it to be blocking. setTimeout(() => { // integrationsManager.handleImpression does not throw errors if (integrationsManager) integrationsManager.handleImpression(impressionData); diff --git a/src/types.ts b/src/types.ts index 777b3258..2a65b297 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,4 +1,4 @@ -import { ISplit, ISplitFiltersValidation } from './dtos/types'; +import { ISplitFiltersValidation } from './dtos/types'; import { IIntegration, IIntegrationFactoryParams } from './integrations/types'; import { ILogger } from './logger/types'; import { ISdkFactoryContext } from './sdkFactory/types'; @@ -98,7 +98,6 @@ export interface ISettings { eventsFirstPushWindow: number }, readonly storage: IStorageSyncFactory | IStorageAsyncFactory, - readonly preloadedData?: SplitIO.PreloadedData, readonly integrations: Array<{ readonly type: string, (params: IIntegrationFactoryParams): IIntegration | void @@ -772,20 +771,21 @@ export namespace SplitIO { * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. * @TODO configurable expiration time policy? */ - // lastUpdated: number, + lastUpdated: number, /** * Change number of the preloaded data. * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. */ since: number, /** - * List of feature flag definitions. - * @TODO rename to flags + * Map of feature flags to their stringified definitions. */ - splitsData: ISplit[], + splitsData: { + [splitName: string]: string + }, /** * Optional map of user keys to their list of segments. - * @TODO rename to memberships + * @TODO remove when releasing first version */ mySegmentsData?: { [key: string]: string[] @@ -793,10 +793,9 @@ export namespace SplitIO { /** * Optional map of segments to their stringified definitions. * This property is ignored if `mySegmentsData` was provided. 
- * @TODO rename to segments */ segmentsData?: { - [segmentName: string]: string[] + [segmentName: string]: string }, } /** From c7a20029b63acf203531a6ecc28254d2800cd3f7 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 9 Oct 2024 17:26:19 -0300 Subject: [PATCH 109/146] Adding setToArray util to avoid depending on Array.from --- CHANGES.txt | 2 +- src/evaluator/Engine.ts | 2 +- src/evaluator/index.ts | 4 ++-- src/storages/inLocalStorage/SplitsCacheInLocal.ts | 5 +++-- src/storages/inLocalStorage/index.ts | 2 +- src/storages/inMemory/InMemoryStorageCS.ts | 2 +- src/storages/inMemory/UniqueKeysCacheInMemory.ts | 3 ++- src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts | 3 ++- src/storages/inRedis/RedisAdapter.ts | 3 ++- src/storages/inRedis/UniqueKeysCacheInRedis.ts | 3 ++- src/storages/pluggable/UniqueKeysCachePluggable.ts | 3 ++- src/storages/pluggable/inMemoryWrapper.ts | 3 ++- src/sync/polling/updaters/splitChangesUpdater.ts | 3 ++- src/utils/lang/sets.ts | 12 +++++++++++- 14 files changed, 34 insertions(+), 16 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 02b24147..b8a7c789 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -7,7 +7,7 @@ - Removed `/mySegments` endpoint from SplitAPI module, as it is replaced by `/memberships` endpoint. - Removed support for MY_SEGMENTS_UPDATE and MY_SEGMENTS_UPDATE_V2 notification types, as they are replaced by MEMBERSHIPS_MS_UPDATE and MEMBERSHIPS_LS_UPDATE notification types. - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. - - Removed internal ponyfills for `Map`, `Set` and `Array.from` global objects, dropping support for IE and other outdated browsers. The SDK now requires the runtime environment to support these features natively or to provide a polyfill. + - Removed internal ponyfills for `Map` and `Set` global objects, dropping support for IE and other outdated browsers. The SDK now requires the runtime environment to support these features natively or to provide a polyfill. 1.17.0 (September 6, 2024) - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. diff --git a/src/evaluator/Engine.ts b/src/evaluator/Engine.ts index e3b38975..89f4ef69 100644 --- a/src/evaluator/Engine.ts +++ b/src/evaluator/Engine.ts @@ -21,7 +21,7 @@ export class Engine { constructor(private baseInfo: ISplit, private evaluator: IEvaluator) { - // in case we don't have a default treatment in the instanciation, use 'control' + // in case we don't have a default treatment in the instantiation, use 'control' if (typeof this.baseInfo.defaultTreatment !== 'string') { this.baseInfo.defaultTreatment = CONTROL; } diff --git a/src/evaluator/index.ts b/src/evaluator/index.ts index 883df997..d58c7f5d 100644 --- a/src/evaluator/index.ts +++ b/src/evaluator/index.ts @@ -7,7 +7,7 @@ import { IStorageAsync, IStorageSync } from '../storages/types'; import { IEvaluationResult } from './types'; import { SplitIO } from '../types'; import { ILogger } from '../logger/types'; -import { returnSetsUnion } from '../utils/lang/sets'; +import { returnSetsUnion, setToArray } from '../utils/lang/sets'; import { WARN_FLAGSET_WITHOUT_FLAGS } from '../logger/constants'; const treatmentException = { @@ -113,7 +113,7 @@ export function evaluateFeaturesByFlagSets( } return featureFlags.size ? 
- evaluateFeatures(log, key, Array.from(featureFlags), attributes, storage) : + evaluateFeatures(log, key, setToArray(featureFlags), attributes, storage) : {}; } diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index 2990a094..a777e081 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -6,6 +6,7 @@ import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; import { ISettings } from '../../types'; import { getStorageHash } from '../KeyBuilder'; +import { setToArray } from '../../utils/lang/sets'; /** * ISplitsCacheSync implementation that stores split definitions in browser LocalStorage. @@ -281,7 +282,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { const flagSetCache = new Set(flagSetFromLocalStorage ? JSON.parse(flagSetFromLocalStorage) : []); flagSetCache.add(featureFlag.name); - localStorage.setItem(flagSetKey, JSON.stringify(Array.from(flagSetCache))); + localStorage.setItem(flagSetKey, JSON.stringify(setToArray(flagSetCache))); }); } @@ -308,7 +309,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { return; } - localStorage.setItem(flagSetKey, JSON.stringify(Array.from(flagSetCache))); + localStorage.setItem(flagSetKey, JSON.stringify(setToArray(flagSetCache))); } } diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index 63c14f3b..93e735e8 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -65,7 +65,7 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn this.uniqueKeys?.clear(); }, - // When using shared instanciation with MEMORY we reuse everything but segments (they are customer per key). + // When using shared instantiation with MEMORY we reuse everything but segments (they are customer per key). shared(matchingKey: string) { return { diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 30667369..0efcbffe 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -41,7 +41,7 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag this.uniqueKeys && this.uniqueKeys.clear(); }, - // When using shared instanciation with MEMORY we reuse everything but segments (they are unique per key) + // When using shared instantiation with MEMORY we reuse everything but segments (they are unique per key) shared() { return { splits: this.splits, diff --git a/src/storages/inMemory/UniqueKeysCacheInMemory.ts b/src/storages/inMemory/UniqueKeysCacheInMemory.ts index ecb468da..9c45721c 100644 --- a/src/storages/inMemory/UniqueKeysCacheInMemory.ts +++ b/src/storages/inMemory/UniqueKeysCacheInMemory.ts @@ -1,6 +1,7 @@ import { IUniqueKeysCacheBase } from '../types'; import { UniqueKeysPayloadSs } from '../../sync/submitters/types'; import { DEFAULT_CACHE_SIZE } from '../inRedis/constants'; +import { setToArray } from '../../utils/lang/sets'; /** * Converts `uniqueKeys` data from cache into request payload for SS. 
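The hunks in this patch swap `Array.from` for the new `setToArray` helper wherever tracked Sets are turned into plain arrays, for example when building the unique-keys payload described just above. A minimal illustrative sketch of that conversion (not part of the patch), using the `{ f, ks }` entry shape shown in these hunks:

// Editorial sketch only: converting a uniqueKeys collector (feature name -> Set of user keys)
// into the array payload entries sent by the server-side submitter.
function setToArray<T>(set: Set<T>): T[] {
  // Simplified: the patched util prefers Array.from when available and falls back to forEach.
  const out: T[] = [];
  set.forEach(value => out.push(value));
  return out;
}

function toUniqueKeysPayload(uniqueKeys: { [featureName: string]: Set<string> }) {
  return Object.keys(uniqueKeys).map(featureName => ({
    f: featureName,                            // feature flag name
    ks: setToArray(uniqueKeys[featureName]),   // user keys seen for that flag
  }));
}

// toUniqueKeysPayload({ onboarding: new Set(['user-1', 'user-2']) })
// -> [{ f: 'onboarding', ks: ['user-1', 'user-2'] }]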
@@ -10,7 +11,7 @@ export function fromUniqueKeysCollector(uniqueKeys: { [featureName: string]: Set const featureNames = Object.keys(uniqueKeys); for (let i = 0; i < featureNames.length; i++) { const featureName = featureNames[i]; - const userKeys = Array.from(uniqueKeys[featureName]); + const userKeys = setToArray(uniqueKeys[featureName]); const uniqueKeysPayload = { f: featureName, ks: userKeys diff --git a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts index 54b946e0..c49fddd1 100644 --- a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts +++ b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts @@ -1,6 +1,7 @@ import { IUniqueKeysCacheBase } from '../types'; import { UniqueKeysPayloadCs } from '../../sync/submitters/types'; import { DEFAULT_CACHE_SIZE } from '../inRedis/constants'; +import { setToArray } from '../../utils/lang/sets'; export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { @@ -70,7 +71,7 @@ export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { const userKeys = Object.keys(uniqueKeys); for (let k = 0; k < userKeys.length; k++) { const userKey = userKeys[k]; - const featureNames = Array.from(uniqueKeys[userKey]); + const featureNames = setToArray(uniqueKeys[userKey]); const uniqueKeysPayload = { k: userKey, fs: featureNames diff --git a/src/storages/inRedis/RedisAdapter.ts b/src/storages/inRedis/RedisAdapter.ts index 049f9e78..6a6b423b 100644 --- a/src/storages/inRedis/RedisAdapter.ts +++ b/src/storages/inRedis/RedisAdapter.ts @@ -3,6 +3,7 @@ import { ILogger } from '../../logger/types'; import { merge, isString } from '../../utils/lang'; import { thenable } from '../../utils/promise/thenable'; import { timeout } from '../../utils/promise/timeout'; +import { setToArray } from '../../utils/lang/sets'; const LOG_PREFIX = 'storage:redis-adapter: '; @@ -149,7 +150,7 @@ export class RedisAdapter extends ioredis { if (instance._runningCommands.size > 0) { instance.log.info(LOG_PREFIX + `Attempting to disconnect but there are ${instance._runningCommands.size} commands still waiting for resolution. 
Defering disconnection until those finish.`); - Promise.all(Array.from(instance._runningCommands)) + Promise.all(setToArray(instance._runningCommands)) .then(() => { instance.log.debug(LOG_PREFIX + 'Pending commands finished successfully, disconnecting.'); originalMethod.apply(instance, params); diff --git a/src/storages/inRedis/UniqueKeysCacheInRedis.ts b/src/storages/inRedis/UniqueKeysCacheInRedis.ts index 50651314..d74e2f03 100644 --- a/src/storages/inRedis/UniqueKeysCacheInRedis.ts +++ b/src/storages/inRedis/UniqueKeysCacheInRedis.ts @@ -5,6 +5,7 @@ import { LOG_PREFIX } from './constants'; import { ILogger } from '../../logger/types'; import { UniqueKeysItemSs } from '../../sync/submitters/types'; import type { RedisAdapter } from './RedisAdapter'; +import { setToArray } from '../../utils/lang/sets'; export class UniqueKeysCacheInRedis extends UniqueKeysCacheInMemory implements IUniqueKeysCacheBase { @@ -28,7 +29,7 @@ export class UniqueKeysCacheInRedis extends UniqueKeysCacheInMemory implements I if (!featureNames.length) return Promise.resolve(false); const uniqueKeysArray = featureNames.map((featureName) => { - const featureKeys = Array.from(this.uniqueKeysTracker[featureName]); + const featureKeys = setToArray(this.uniqueKeysTracker[featureName]); const uniqueKeysPayload = { f: featureName, ks: featureKeys diff --git a/src/storages/pluggable/UniqueKeysCachePluggable.ts b/src/storages/pluggable/UniqueKeysCachePluggable.ts index ae46171d..9deddac4 100644 --- a/src/storages/pluggable/UniqueKeysCachePluggable.ts +++ b/src/storages/pluggable/UniqueKeysCachePluggable.ts @@ -4,6 +4,7 @@ import { DEFAULT_CACHE_SIZE, REFRESH_RATE } from '../inRedis/constants'; import { LOG_PREFIX } from './constants'; import { ILogger } from '../../logger/types'; import { UniqueKeysItemSs } from '../../sync/submitters/types'; +import { setToArray } from '../../utils/lang/sets'; export class UniqueKeysCachePluggable extends UniqueKeysCacheInMemory implements IUniqueKeysCacheBase { @@ -27,7 +28,7 @@ export class UniqueKeysCachePluggable extends UniqueKeysCacheInMemory implements if (!featureNames.length) return Promise.resolve(false); const uniqueKeysArray = featureNames.map((featureName) => { - const featureKeys = Array.from(this.uniqueKeysTracker[featureName]); + const featureKeys = setToArray(this.uniqueKeysTracker[featureName]); const uniqueKeysPayload = { f: featureName, ks: featureKeys diff --git a/src/storages/pluggable/inMemoryWrapper.ts b/src/storages/pluggable/inMemoryWrapper.ts index afc00285..ba2f10ed 100644 --- a/src/storages/pluggable/inMemoryWrapper.ts +++ b/src/storages/pluggable/inMemoryWrapper.ts @@ -1,5 +1,6 @@ import { IPluggableStorageWrapper } from '../types'; import { startsWith, toNumber } from '../../utils/lang'; +import { setToArray } from '../../utils/lang/sets'; /** * Creates a IPluggableStorageWrapper implementation that stores items in memory. 
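A little above, the RedisAdapter hunk keeps every in-flight command promise in a Set and, via `setToArray`, waits for all of them before actually disconnecting. A standalone sketch of that teardown pattern, with illustrative names rather than the adapter's actual API:

// Editorial sketch: track in-flight promises in a Set and drain them before teardown.
const runningCommands = new Set<Promise<unknown>>();

function trackCommand<T>(command: Promise<T>): Promise<T> {
  runningCommands.add(command);
  // Remove the promise once it settles, whether it resolved or rejected.
  command.then(() => runningCommands.delete(command), () => runningCommands.delete(command));
  return command;
}

function disconnectWhenIdle(disconnect: () => void) {
  if (runningCommands.size === 0) return disconnect();
  // Swallow rejections so a failed command does not block the deferred disconnect.
  Promise.all(Array.from(runningCommands).map(p => p.catch(() => undefined))).then(disconnect);
}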
@@ -107,7 +108,7 @@ export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWra getItems(key: string) { const set = _cache[key]; if (!set) return Promise.resolve([]); - if (set instanceof Set) return Promise.resolve(Array.from(set)); + if (set instanceof Set) return Promise.resolve(setToArray(set)); return Promise.reject('key is not a set'); }, diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/splitChangesUpdater.ts index a125c8e2..f3b9e824 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/splitChangesUpdater.ts @@ -8,6 +8,7 @@ import { ILogger } from '../../../logger/types'; import { SYNC_SPLITS_FETCH, SYNC_SPLITS_NEW, SYNC_SPLITS_REMOVED, SYNC_SPLITS_SEGMENTS, SYNC_SPLITS_FETCH_FAILS, SYNC_SPLITS_FETCH_RETRY } from '../../../logger/constants'; import { startsWith } from '../../../utils/lang'; import { IN_SEGMENT } from '../../../utils/constants'; +import { setToArray } from '../../../utils/lang/sets'; type ISplitChangesUpdater = (noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit, changeNumber: number }) => Promise @@ -88,7 +89,7 @@ export function computeSplitsMutation(entries: ISplit[], filters: ISplitFiltersV return accum; }, { added: [], removed: [], segments: [] } as ISplitMutations); - computed.segments = Array.from(segments); + computed.segments = setToArray(segments); return computed; } diff --git a/src/utils/lang/sets.ts b/src/utils/lang/sets.ts index 155c4587..5015a0fc 100644 --- a/src/utils/lang/sets.ts +++ b/src/utils/lang/sets.ts @@ -1,5 +1,15 @@ +export function setToArray(set: Set): T[] { + if (Array.from) return Array.from(set); + + const array: T[] = []; + set.forEach((value: T) => { + array.push(value); + }); + return array; +} + export function returnSetsUnion(set: Set, set2: Set): Set { - return new Set(Array.from(set).concat(Array.from(set2))); + return new Set(setToArray(set).concat(setToArray(set2))); } export function returnDifference(list: T[] = [], list2: T[] = []): T[] { From c8b7074edd353778500f9c0e5fe100a347930d1a Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 11:04:43 -0300 Subject: [PATCH 110/146] Add issue link for future ref --- src/storages/inMemory/InMemoryStorageCS.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 30667369..dc85d542 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -63,7 +63,7 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag }; // @TODO revisit storage logic in localhost mode - // No tracking data in localhost mode to avoid memory leaks + // No tracking in localhost mode to avoid memory leaks: https://github.com/splitio/javascript-commons/issues/181 if (params.settings.mode === LOCALHOST_MODE) { const noopTrack = () => true; storage.impressions.track = noopTrack; From 270d811259e21660675e5c4d38ff5e8eda06943f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 11:08:39 -0300 Subject: [PATCH 111/146] Revert "Add changelog entry" This reverts commit cf822b900b7fc174698f6b18b263018b805920c0. --- CHANGES.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index 4c333159..a6c1ed8b 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,7 +1,6 @@ 2.0.0 (October XX, 2024) - Added support for targeting rules based on large segments. 
- Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. - - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow. - Updated the handling of timers and async operations by moving them into an `init` factory method to enable lazy initialization of the SDK. This update is intended for the React SDK. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. - BREAKING CHANGES: From 34c4bcef699aeff29dd59602019544cda2054c88 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 11:20:52 -0300 Subject: [PATCH 112/146] Revert "Add onReadyFromCacheCb to storage factory params for code cleanup" This reverts commit 38092dc8247ea7aff08df579b8a3bf09135d9849. --- src/sdkFactory/index.ts | 5 +---- src/storages/AbstractSplitsCacheAsync.ts | 8 ++++++++ src/storages/AbstractSplitsCacheSync.ts | 8 ++++++++ src/storages/inLocalStorage/SplitsCacheInLocal.ts | 11 ++++++++++- .../__tests__/SplitsCacheInLocal.spec.ts | 6 ++++++ src/storages/inLocalStorage/index.ts | 10 ++-------- src/storages/types.ts | 5 ++++- src/sync/offline/syncTasks/fromObjectSyncTask.ts | 10 +++++++--- src/sync/polling/updaters/splitChangesUpdater.ts | 14 +++++++++++--- .../storage/__tests__/storageCS.spec.ts | 7 ++++++- src/utils/settingsValidation/storage/storageCS.ts | 13 +++++++++++++ 11 files changed, 76 insertions(+), 21 deletions(-) diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 41706cc6..ee790794 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -7,7 +7,7 @@ import { IBasicClient, SplitIO } from '../types'; import { validateAndTrackApiKey } from '../utils/inputValidation/apiKey'; import { createLoggerAPI } from '../logger/sdkLogger'; import { NEW_FACTORY, RETRIEVE_MANAGER } from '../logger/constants'; -import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../readiness/constants'; +import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; import { objectAssign } from '../utils/lang/objectAssign'; import { strategyDebugFactory } from '../trackers/strategy/strategyDebug'; import { strategyOptimizedFactory } from '../trackers/strategy/strategyOptimized'; @@ -52,9 +52,6 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. readiness.splits.emit(SDK_SPLITS_ARRIVED); readiness.segments.emit(SDK_SEGMENTS_ARRIVED); }, - onReadyFromCacheCb: () => { - readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); - } }); // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` const clients: Record = {}; diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts index 6534c82d..9e4e136c 100644 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ b/src/storages/AbstractSplitsCacheAsync.ts @@ -28,6 +28,14 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { return Promise.resolve(true); } + /** + * Check if the splits information is already stored in cache. + * Noop, just keeping the interface. This is used by client-side implementations only. + */ + checkCache(): Promise { + return Promise.resolve(false); + } + /** * Kill `name` split and set `defaultTreatment` and `changeNumber`. * Used for SPLIT_KILL push notifications. 
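This revert brings back `checkCache()` as the storage-side signal that usable data is already present, which the sync layer then turns into the SDK_READY_FROM_CACHE flow (see the splitChangesUpdater hunk further below). A minimal sketch of that contract, assuming only what these hunks show; the emitter is a stand-in for the real splits event emitter:

// Editorial sketch: a cache answers whether it already holds data, and the caller
// emits the cache-loaded event based on that answer.
interface MinimalSplitsCache {
  getChangeNumber(): number;
  checkCache(): boolean | Promise<boolean>; // sync caches return boolean, async ones a Promise
}

function maybeEmitReadyFromCache(splits: MinimalSplitsCache, emit: (event: string) => void) {
  // Wrapping in Promise.resolve mirrors the updater hunk, since checkCache can be async.
  Promise.resolve(splits.checkCache()).then(isCacheReady => {
    if (isCacheReady) emit('SDK_SPLITS_CACHE_LOADED'); // constant name taken from this patch
  });
}

// e.g. an in-memory cache that considers itself "loaded" once it has a change number:
const cache: MinimalSplitsCache = {
  getChangeNumber: () => 123,
  checkCache() { return this.getChangeNumber() > -1; },
};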
diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index c0e370ac..ef44db40 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -48,6 +48,14 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { abstract clear(): void + /** + * Check if the splits information is already stored in cache. This data can be preloaded. + * It is used as condition to emit SDK_SPLITS_CACHE_LOADED, and then SDK_READY_FROM_CACHE. + */ + checkCache(): boolean { + return false; + } + /** * Kill `name` split and set `defaultTreatment` and `changeNumber`. * Used for SPLIT_KILL push notifications. diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index 66d8869a..ccd4859f 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -217,6 +217,15 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { } } + /** + * Check if the splits information is already stored in browser LocalStorage. + * In this function we could add more code to check if the data is valid. + * @override + */ + checkCache(): boolean { + return this.getChangeNumber() > -1; + } + /** * Clean Splits cache if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, * @@ -241,7 +250,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { this.updateNewFilter = true; // if there is cache, clear it - if (this.getChangeNumber() > -1) this.clear(); + if (this.checkCache()) this.clear(); } catch (e) { this.log.error(LOG_PREFIX + e); diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts index 4205d7a6..732ca8b7 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts @@ -31,10 +31,16 @@ test('SPLIT CACHE / LocalStorage', () => { expect(cache.getSplit('lol1')).toEqual(null); expect(cache.getSplit('lol2')).toEqual(somethingElse); + expect(cache.checkCache()).toBe(false); // checkCache should return false until localstorage has data. + expect(cache.getChangeNumber() === -1).toBe(true); + expect(cache.checkCache()).toBe(false); // checkCache should return false until localstorage has data. + cache.setChangeNumber(123); + expect(cache.checkCache()).toBe(true); // checkCache should return true once localstorage has data. 
+ expect(cache.getChangeNumber() === 123).toBe(true); }); diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index 7360d4e3..63c14f3b 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -12,7 +12,7 @@ import { SplitsCacheInMemory } from '../inMemory/SplitsCacheInMemory'; import { DEFAULT_CACHE_EXPIRATION_IN_MILLIS } from '../../utils/constants/browser'; import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; import { LOG_PREFIX } from './constants'; -import { DEBUG, LOCALHOST_MODE, NONE, STORAGE_LOCALSTORAGE } from '../../utils/constants'; +import { DEBUG, NONE, STORAGE_LOCALSTORAGE } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from '../inMemory/TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from '../inMemory/UniqueKeysCacheInMemoryCS'; import { getMatching } from '../../utils/key'; @@ -36,7 +36,7 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn return InMemoryStorageCSFactory(params); } - const { onReadyFromCacheCb, settings, settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; + const { settings, settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; const matchingKey = getMatching(settings.core.key); const keys = new KeyBuilderCS(prefix, matchingKey); const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; @@ -55,12 +55,6 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn telemetry: shouldRecordTelemetry(params) ? new TelemetryCacheInMemory(splits, segments) : undefined, uniqueKeys: impressionsMode === NONE ? new UniqueKeysCacheInMemoryCS() : undefined, - init() { - if (settings.mode === LOCALHOST_MODE || splits.getChangeNumber() > -1) { - Promise.resolve().then(onReadyFromCacheCb); - } - }, - destroy() { this.splits = new SplitsCacheInMemory(__splitFiltersValidation); this.segments = new MySegmentsCacheInMemory(); diff --git a/src/storages/types.ts b/src/storages/types.ts index 61ab10f2..c7f84258 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -208,6 +208,8 @@ export interface ISplitsCacheBase { // only for Client-Side. Returns true if the storage is not synchronized yet (getChangeNumber() === -1) or contains a FF using segments or large segments usesSegments(): MaybeThenable, clear(): MaybeThenable, + // should never reject or throw an exception. Instead return false by default, to avoid emitting SDK_READY_FROM_CACHE. 
+ checkCache(): MaybeThenable, killLocally(name: string, defaultTreatment: string, changeNumber: number): MaybeThenable, getNamesByFlagSets(flagSets: string[]): MaybeThenable[]> } @@ -224,6 +226,7 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { trafficTypeExists(trafficType: string): boolean, usesSegments(): boolean, clear(): void, + checkCache(): boolean, killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean, getNamesByFlagSets(flagSets: string[]): ISet[] } @@ -240,6 +243,7 @@ export interface ISplitsCacheAsync extends ISplitsCacheBase { trafficTypeExists(trafficType: string): Promise, usesSegments(): Promise, clear(): Promise, + checkCache(): Promise, killLocally(name: string, defaultTreatment: string, changeNumber: number): Promise, getNamesByFlagSets(flagSets: string[]): Promise[]> } @@ -501,7 +505,6 @@ export interface IStorageFactoryParams { * It is meant for emitting SDK_READY event in consumer mode, and waiting before using the storage in the synchronizer. */ onReadyCb: (error?: any) => void, - onReadyFromCacheCb: (error?: any) => void, } export type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; diff --git a/src/sync/offline/syncTasks/fromObjectSyncTask.ts b/src/sync/offline/syncTasks/fromObjectSyncTask.ts index d555552b..84805110 100644 --- a/src/sync/offline/syncTasks/fromObjectSyncTask.ts +++ b/src/sync/offline/syncTasks/fromObjectSyncTask.ts @@ -7,7 +7,7 @@ import { syncTaskFactory } from '../../syncTask'; import { ISyncTask } from '../../types'; import { ISettings } from '../../../types'; import { CONTROL } from '../../../utils/constants'; -import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; +import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../../../readiness/constants'; import { SYNC_OFFLINE_DATA, ERROR_SYNC_OFFLINE_LOADING } from '../../../logger/constants'; /** @@ -60,8 +60,12 @@ export function fromObjectUpdaterFactory( if (startingUp) { startingUp = false; - // Emits SDK_READY - readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + Promise.resolve(splitsCache.checkCache()).then(cacheReady => { + // Emits SDK_READY_FROM_CACHE + if (cacheReady) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); + // Emits SDK_READY + readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + }); } return true; }); diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/splitChangesUpdater.ts index 29f023cf..669a2010 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/splitChangesUpdater.ts @@ -4,7 +4,7 @@ import { ISplitChangesFetcher } from '../fetchers/types'; import { ISplit, ISplitChangesResponse, ISplitFiltersValidation } from '../../../dtos/types'; import { ISplitsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; -import { SDK_SPLITS_ARRIVED } from '../../../readiness/constants'; +import { SDK_SPLITS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_SPLITS_FETCH, SYNC_SPLITS_NEW, SYNC_SPLITS_REMOVED, SYNC_SPLITS_SEGMENTS, SYNC_SPLITS_FETCH_FAILS, SYNC_SPLITS_FETCH_RETRY } from '../../../logger/constants'; import { startsWith } from '../../../utils/lang'; @@ -153,8 +153,7 @@ export function splitChangesUpdaterFactory( */ function _splitChangesUpdater(since: number, retry = 0): Promise { log.debug(SYNC_SPLITS_FETCH, [since]); - - return 
Promise.resolve(splitUpdateNotification ? + const fetcherPromise = Promise.resolve(splitUpdateNotification ? { splits: [splitUpdateNotification.payload], till: splitUpdateNotification.changeNumber } : splitChangesFetcher(since, noCache, till, _promiseDecorator) ) @@ -201,6 +200,15 @@ export function splitChangesUpdaterFactory( } return false; }); + + // After triggering the requests, if we have cached splits information let's notify that to emit SDK_READY_FROM_CACHE. + // Wrapping in a promise since checkCache can be async. + if (splitsEventEmitter && startingUp) { + Promise.resolve(splits.checkCache()).then(isCacheReady => { + if (isCacheReady) splitsEventEmitter.emit(SDK_SPLITS_CACHE_LOADED); + }); + } + return fetcherPromise; } let sincePromise = Promise.resolve(splits.getChangeNumber()); // `getChangeNumber` never rejects or throws error diff --git a/src/utils/settingsValidation/storage/__tests__/storageCS.spec.ts b/src/utils/settingsValidation/storage/__tests__/storageCS.spec.ts index 88e078a0..5bd7c389 100644 --- a/src/utils/settingsValidation/storage/__tests__/storageCS.spec.ts +++ b/src/utils/settingsValidation/storage/__tests__/storageCS.spec.ts @@ -1,4 +1,4 @@ -import { validateStorageCS } from '../storageCS'; +import { validateStorageCS, __InLocalStorageMockFactory } from '../storageCS'; import { InMemoryStorageCSFactory } from '../../../../storages/inMemory/InMemoryStorageCS'; import { loggerMock as log } from '../../../../logger/__tests__/sdkLogger.mock'; @@ -32,6 +32,11 @@ describe('storage validator for pluggable storage (client-side)', () => { expect(log.error).not.toBeCalled(); }); + test('fallbacks to mock InLocalStorage storage if the storage is InLocalStorage and the mode localhost', () => { + expect(validateStorageCS({ log, mode: 'localhost', storage: mockInLocalStorageFactory })).toBe(__InLocalStorageMockFactory); + expect(log.error).not.toBeCalled(); + }); + test('throws error if the provided storage factory is not compatible with the mode', () => { expect(() => { validateStorageCS({ log, mode: 'consumer', storage: mockInLocalStorageFactory }); }).toThrow('A PluggableStorage instance is required on consumer mode'); expect(() => { validateStorageCS({ log, mode: 'consumer_partial', storage: mockInLocalStorageFactory }); }).toThrow('A PluggableStorage instance is required on consumer mode'); diff --git a/src/utils/settingsValidation/storage/storageCS.ts b/src/utils/settingsValidation/storage/storageCS.ts index 43783630..097ce95d 100644 --- a/src/utils/settingsValidation/storage/storageCS.ts +++ b/src/utils/settingsValidation/storage/storageCS.ts @@ -3,6 +3,14 @@ import { ISettings, SDKMode } from '../../../types'; import { ILogger } from '../../../logger/types'; import { ERROR_STORAGE_INVALID } from '../../../logger/constants'; import { LOCALHOST_MODE, STANDALONE_MODE, STORAGE_PLUGGABLE, STORAGE_LOCALSTORAGE, STORAGE_MEMORY } from '../../../utils/constants'; +import { IStorageFactoryParams, IStorageSync } from '../../../storages/types'; + +export function __InLocalStorageMockFactory(params: IStorageFactoryParams): IStorageSync { + const result = InMemoryStorageCSFactory(params); + result.splits.checkCache = () => true; // to emit SDK_READY_FROM_CACHE + return result; +} +__InLocalStorageMockFactory.type = STORAGE_MEMORY; /** * This function validates `settings.storage` object @@ -22,6 +30,11 @@ export function validateStorageCS(settings: { log: ILogger, storage?: any, mode: log.error(ERROR_STORAGE_INVALID); } + // In localhost mode with InLocalStorage, fallback to a 
mock InLocalStorage to emit SDK_READY_FROM_CACHE + if (mode === LOCALHOST_MODE && storage.type === STORAGE_LOCALSTORAGE) { + return __InLocalStorageMockFactory; + } + if ([LOCALHOST_MODE, STANDALONE_MODE].indexOf(mode) === -1) { // Consumer modes require an async storage if (storage.type !== STORAGE_PLUGGABLE) throw new Error('A PluggableStorage instance is required on consumer mode'); From 476013248b165fb995780c264ba6e1bf76f44115 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 11:29:55 -0300 Subject: [PATCH 113/146] Rename internal var --- src/sdkFactory/index.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index ee790794..86065c5f 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -30,11 +30,11 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. // On non-recoverable errors, we should mark the SDK as destroyed and not start synchronization. // initialization - let isInit = false; + let hasInit = false; const initCallbacks: (() => void)[] = []; function whenInit(cb: () => void) { - if (isInit) cb(); + if (hasInit) cb(); else initCallbacks.push(cb); } @@ -93,8 +93,8 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. function init() { - if (isInit) return; - isInit = true; + if (hasInit) return; + hasInit = true; // We will just log and allow for the SDK to end up throwing an SDK_TIMEOUT event for devs to handle. validateAndTrackApiKey(log, settings.core.authorizationKey); From 57ba406d6c45d7e32b66ed0bcf489d563de72e2d Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 11:43:56 -0300 Subject: [PATCH 114/146] Revert lazy init in storage --- src/sdkFactory/index.ts | 1 - src/storages/inRedis/RedisAdapter.ts | 2 +- .../pluggable/__tests__/index.spec.ts | 4 -- src/storages/pluggable/index.ts | 71 +++++++++---------- src/storages/types.ts | 1 - 5 files changed, 34 insertions(+), 45 deletions(-) diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 86065c5f..4bfe62e6 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -99,7 +99,6 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. // We will just log and allow for the SDK to end up throwing an SDK_TIMEOUT event for devs to handle. validateAndTrackApiKey(log, settings.core.authorizationKey); readiness.init(); - storage.init && storage.init(); uniqueKeysTracker && uniqueKeysTracker.start(); syncManager && syncManager.start(); signalListener && signalListener.start(); diff --git a/src/storages/inRedis/RedisAdapter.ts b/src/storages/inRedis/RedisAdapter.ts index 5a800ec5..6d738606 100644 --- a/src/storages/inRedis/RedisAdapter.ts +++ b/src/storages/inRedis/RedisAdapter.ts @@ -20,7 +20,7 @@ const DEFAULT_OPTIONS = { const DEFAULT_LIBRARY_OPTIONS = { enableOfflineQueue: false, connectTimeout: DEFAULT_OPTIONS.connectionTimeout, - lazyConnect: false // @TODO true to avoid side-effects on instantiation. 
+ lazyConnect: false }; interface IRedisCommand { diff --git a/src/storages/pluggable/__tests__/index.spec.ts b/src/storages/pluggable/__tests__/index.spec.ts index 98f5622e..a0f32b1d 100644 --- a/src/storages/pluggable/__tests__/index.spec.ts +++ b/src/storages/pluggable/__tests__/index.spec.ts @@ -28,7 +28,6 @@ describe('PLUGGABLE STORAGE', () => { test('creates a storage instance', async () => { const storageFactory = PluggableStorage({ prefix, wrapper: wrapperMock }); const storage = storageFactory(internalSdkParams); - storage.init(); assertStorageInterface(storage); // the instance must implement the storage interface expect(wrapperMock.connect).toBeCalledTimes(1); // wrapper connect method should be called once when storage is created @@ -75,7 +74,6 @@ describe('PLUGGABLE STORAGE', () => { test('creates a storage instance for partial consumer mode (events and impressions cache in memory)', async () => { const storageFactory = PluggableStorage({ prefix, wrapper: wrapperMock }); const storage = storageFactory({ ...internalSdkParams, settings: { ...internalSdkParams.settings, mode: CONSUMER_PARTIAL_MODE } }); - storage.init(); assertStorageInterface(storage); expect(wrapperMock.connect).toBeCalledTimes(1); @@ -104,7 +102,6 @@ describe('PLUGGABLE STORAGE', () => { // Create storage instance. Wrapper is pollute but doesn't have filter query key, so it should clear the cache await new Promise(resolve => { storage = storageFactory({ onReadyCb: resolve, settings: { ...fullSettings, mode: undefined } }); - storage.init(); }); // Assert that expected caches are present @@ -124,7 +121,6 @@ describe('PLUGGABLE STORAGE', () => { // Create storage instance. This time the wrapper has the current filter query key, so it should not clear the cache await new Promise(resolve => { storage = storageFactory({ onReadyCb: resolve, settings: { ...fullSettings, mode: undefined } }); - storage.init(); }); // Assert that cache was not cleared diff --git a/src/storages/pluggable/index.ts b/src/storages/pluggable/index.ts index 09bf2e45..60350d66 100644 --- a/src/storages/pluggable/index.ts +++ b/src/storages/pluggable/index.ts @@ -1,4 +1,4 @@ -import { IPluggableStorageWrapper, IStorageAsyncFactory, IStorageFactoryParams, ITelemetryCacheAsync } from '../types'; +import { IPluggableStorageWrapper, IStorageAsync, IStorageAsyncFactory, IStorageFactoryParams, ITelemetryCacheAsync } from '../types'; import { KeyBuilderSS } from '../KeyBuilderSS'; import { SplitsCachePluggable } from './SplitsCachePluggable'; @@ -62,12 +62,11 @@ export function PluggableStorage(options: PluggableStorageOptions): IStorageAsyn const prefix = validatePrefix(options.prefix); - function PluggableStorageFactory(params: IStorageFactoryParams) { + function PluggableStorageFactory(params: IStorageFactoryParams): IStorageAsync { const { onReadyCb, settings, settings: { log, mode, sync: { impressionsMode }, scheduler: { impressionsQueueSize, eventsQueueSize } } } = params; const metadata = metadataBuilder(settings); const keys = new KeyBuilderSS(prefix, metadata); const wrapper = wrapperAdapter(log, options.wrapper); - let connectPromise: Promise; const isSyncronizer = mode === undefined; // If mode is not defined, the synchronizer is running const isPartialConsumer = mode === CONSUMER_PARTIAL_MODE; @@ -90,6 +89,35 @@ export function PluggableStorage(options: PluggableStorageOptions): IStorageAsyn new UniqueKeysCachePluggable(log, keys.buildUniqueKeysKey(), wrapper) : undefined; + // Connects to wrapper and emits SDK_READY event on main client 
+ const connectPromise = wrapper.connect().then(() => { + if (isSyncronizer) { + // In standalone or producer mode, clear storage if SDK key or feature flag filter has changed + return wrapper.get(keys.buildHashKey()).then((hash) => { + const currentHash = getStorageHash(settings); + if (hash !== currentHash) { + log.info(LOG_PREFIX + 'Storage HASH has changed (SDK key, flags filter criteria or flags spec version was modified). Clearing cache'); + return wrapper.getKeysByPrefix(`${keys.prefix}.`).then(storageKeys => { + return Promise.all(storageKeys.map(storageKey => wrapper.del(storageKey))); + }).then(() => wrapper.set(keys.buildHashKey(), currentHash)); + } + }).then(() => { + onReadyCb(); + }); + } else { + // Start periodic flush of async storages if not running synchronizer (producer mode) + if (impressionCountsCache && (impressionCountsCache as ImpressionCountsCachePluggable).start) (impressionCountsCache as ImpressionCountsCachePluggable).start(); + if (uniqueKeysCache && (uniqueKeysCache as UniqueKeysCachePluggable).start) (uniqueKeysCache as UniqueKeysCachePluggable).start(); + if (telemetry && (telemetry as ITelemetryCacheAsync).recordConfig) (telemetry as ITelemetryCacheAsync).recordConfig(); + + onReadyCb(); + } + }).catch((e) => { + e = e || new Error('Error connecting wrapper'); + onReadyCb(e); + return e; // Propagate error for shared clients + }); + return { splits: new SplitsCachePluggable(log, keys, wrapper, settings.sync.__splitFiltersValidation), segments: new SegmentsCachePluggable(log, keys, wrapper), @@ -99,39 +127,6 @@ export function PluggableStorage(options: PluggableStorageOptions): IStorageAsyn telemetry, uniqueKeys: uniqueKeysCache, - init() { - if (connectPromise) return connectPromise; - - // Connects to wrapper and emits SDK_READY event on main client - return connectPromise = wrapper.connect().then(() => { - if (isSyncronizer) { - // In standalone or producer mode, clear storage if SDK key or feature flag filter has changed - return wrapper.get(keys.buildHashKey()).then((hash) => { - const currentHash = getStorageHash(settings); - if (hash !== currentHash) { - log.info(LOG_PREFIX + 'Storage HASH has changed (SDK key, flags filter criteria or flags spec version was modified). Clearing cache'); - return wrapper.getKeysByPrefix(`${keys.prefix}.`).then(storageKeys => { - return Promise.all(storageKeys.map(storageKey => wrapper.del(storageKey))); - }).then(() => wrapper.set(keys.buildHashKey(), currentHash)); - } - }).then(() => { - onReadyCb(); - }); - } else { - // Start periodic flush of async storages if not running synchronizer (producer mode) - if (impressionCountsCache && (impressionCountsCache as ImpressionCountsCachePluggable).start) (impressionCountsCache as ImpressionCountsCachePluggable).start(); - if (uniqueKeysCache && (uniqueKeysCache as UniqueKeysCachePluggable).start) (uniqueKeysCache as UniqueKeysCachePluggable).start(); - if (telemetry && (telemetry as ITelemetryCacheAsync).recordConfig) (telemetry as ITelemetryCacheAsync).recordConfig(); - - onReadyCb(); - } - }).catch((e) => { - e = e || new Error('Error connecting wrapper'); - onReadyCb(e); - return e; // Propagate error for shared clients - }); - }, - // Stop periodic flush and disconnect the underlying storage destroy() { return Promise.all(isSyncronizer ? 
[] : [ @@ -141,8 +136,8 @@ export function PluggableStorage(options: PluggableStorageOptions): IStorageAsyn }, // emits SDK_READY event on shared clients and returns a reference to the storage - shared(_: string, onReadyCb: (error?: any) => void) { - this.init().then(onReadyCb); + shared(_, onReadyCb) { + connectPromise.then(onReadyCb); return { ...this, diff --git a/src/storages/types.ts b/src/storages/types.ts index 2cce9298..1daa81fb 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -460,7 +460,6 @@ export interface IStorageBase< events: TEventsCache, telemetry?: TTelemetryCache, uniqueKeys?: TUniqueKeysCache, - init?: () => void | Promise, destroy(): void | Promise, shared?: (matchingKey: string, onReadyCb: (error?: any) => void) => this } From f6e492215b3b1e544d0c376da09afedac1858e7b Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 12:54:23 -0300 Subject: [PATCH 115/146] Update changelog entry --- CHANGES.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index a6c1ed8b..4af451c9 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,7 +1,7 @@ 2.0.0 (October XX, 2024) - Added support for targeting rules based on large segments. - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. - - Updated the handling of timers and async operations by moving them into an `init` factory method to enable lazy initialization of the SDK. This update is intended for the React SDK. + - Updated the handling of timers and async operations inside an `init` factory method to enable lazy initialization of the SDK in standalone mode. This update is intended for the React SDK. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. - BREAKING CHANGES: - Updated default flag spec version to 1.2. 
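With the revert above, the wrapper connection is kicked off as soon as the PluggableStorage factory runs, and readiness flows through `onReadyCb`. A usage sketch in the spirit of the test hunks shown earlier in this patch; the import path, wrapper and settings values are placeholders rather than real fixtures:

// Editorial sketch: eager wrapper connection on storage creation (placeholders marked).
import { PluggableStorage } from './storages/pluggable'; // path within this repo; adjust for the published package

declare const myWrapper: any;   // an object implementing IPluggableStorageWrapper (placeholder)
declare const mySettings: any;  // the validated SDK settings object (placeholder)

const storageFactory = PluggableStorage({ prefix: 'MY_PREFIX', wrapper: myWrapper });

const storage = storageFactory({
  settings: mySettings,
  onReadyCb(error?: any) {
    if (error) console.log('wrapper connection failed', error);
    else console.log('storage ready'); // drives SDK_READY in consumer mode
  },
});

// Shared clients reuse the same connection promise and report readiness via their own callback.
const sharedStorage = storage.shared && storage.shared('another_user_key', () => console.log('shared storage ready'));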
From 411ce8f7d4e80cb88ce071ca94fe21ddf4970032 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 13:20:07 -0300 Subject: [PATCH 116/146] Fix lazy start on syncManagerOffline --- package-lock.json | 4 +-- package.json | 2 +- src/sync/offline/syncManagerOffline.ts | 34 ++++++++++++++++---------- src/sync/syncManagerOnline.ts | 2 -- 4 files changed, 24 insertions(+), 18 deletions(-) diff --git a/package-lock.json b/package-lock.json index de54ab44..7d9e3847 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.3", + "version": "1.17.1-rc.4", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.3", + "version": "1.17.1-rc.4", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index f09e09f7..92de0749 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.1-rc.3", + "version": "1.17.1-rc.4", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/sync/offline/syncManagerOffline.ts b/src/sync/offline/syncManagerOffline.ts index cd5c435d..cd3f0bcd 100644 --- a/src/sync/offline/syncManagerOffline.ts +++ b/src/sync/offline/syncManagerOffline.ts @@ -1,4 +1,4 @@ -import { ISyncManager, ISyncManagerCS } from '../types'; +import { ISyncManagerCS } from '../types'; import { fromObjectSyncTaskFactory } from './syncTasks/fromObjectSyncTask'; import { objectAssign } from '../../utils/lang/objectAssign'; import { ISplitsParser } from './splitsParser/types'; @@ -29,26 +29,34 @@ export function syncManagerOfflineFactory( storage, }: ISdkFactoryContextSync): ISyncManagerCS { + const mainSyncManager = fromObjectSyncTaskFactory(splitsParserFactory(), storage, readiness, settings); + const mainStart = mainSyncManager.start; + const sharedStarts: Array<() => void> = []; + return objectAssign( - fromObjectSyncTaskFactory(splitsParserFactory(), storage, readiness, settings), + mainSyncManager, { + start() { + mainStart(); + sharedStarts.forEach(cb => cb()); + sharedStarts.length = 0; + }, // fake flush, that resolves immediately flush, // [Only used for client-side] - shared(matchingKey: string, readinessManager: IReadinessManager): ISyncManager { + shared(matchingKey: string, readinessManager: IReadinessManager) { + // In LOCALHOST mode, shared clients are ready in the next event-loop cycle than created + // SDK_READY cannot be emitted directly because this will not update the readiness status + function emitSdkReady() { + readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); // SDK_SPLITS_ARRIVED emitted by main SyncManager + } + + if (mainSyncManager.isRunning()) setTimeout(emitSdkReady); + else sharedStarts.push(emitSdkReady); + return { - start() { - // In LOCALHOST mode, shared clients are ready in the next event-loop cycle than created - // SDK_READY cannot be emitted directly because this will not update the readiness status - setTimeout(() => { - readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); // SDK_SPLITS_ARRIVED emitted by main SyncManager - }, 0); - }, stop() { }, - isRunning() { - return true; - }, flush, }; } diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index b3c4aad7..9dfda547 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -164,8 +164,6 @@ export function 
syncManagerOnlineFactory( } return { - isRunning: mySegmentsSyncTask.isRunning, - stop() { // check in case `client.destroy()` has been invoked more than once for the same client const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).get(matchingKey); From 18b32439fee98534bc0ee15a5f90ef4aee476c52 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 17:29:53 -0300 Subject: [PATCH 117/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 9a90a643..8bc19544 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.0", + "version": "2.0.0-rc.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.0", + "version": "2.0.0-rc.1", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index f5818af3..870e561c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.0", + "version": "2.0.0-rc.1", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From af7981f7f43708acf35eeef84906cae45436269b Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 18:11:47 -0300 Subject: [PATCH 118/146] Polishing --- src/sync/polling/updaters/segmentChangesUpdater.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sync/polling/updaters/segmentChangesUpdater.ts b/src/sync/polling/updaters/segmentChangesUpdater.ts index a643826b..770f68c6 100644 --- a/src/sync/polling/updaters/segmentChangesUpdater.ts +++ b/src/sync/polling/updaters/segmentChangesUpdater.ts @@ -38,7 +38,7 @@ export function segmentChangesUpdaterFactory( segmentChangesFetcher(since, segmentName, noCache, till).then((changes) => { return Promise.all(changes.map(x => { log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processing ${segmentName} with till = ${x.till}. Added: ${x.added.length}. 
Removed: ${x.removed.length}`); - return segments.update(x.name, x.added, x.removed, x.till); + return segments.update(segmentName, x.added, x.removed, x.till); })).then((updates) => { return updates.some(update => update); }); From 9c381e17f70a55c8d4232b1ba73651593edd03c4 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 25 Oct 2024 02:31:45 -0300 Subject: [PATCH 119/146] Remove sync.localhostMode option and validation utils --- CHANGES.txt | 1 + src/logger/constants.ts | 1 - src/logger/messages/error.ts | 1 - src/sync/offline/LocalhostFromObject.ts | 10 +-- src/types.ts | 13 +--- .../__tests__/settings.mocks.ts | 2 - src/utils/settingsValidation/index.ts | 5 +- .../localhost/__tests__/index.spec.ts | 62 ------------------- .../settingsValidation/localhost/builtin.ts | 16 ----- .../settingsValidation/localhost/pluggable.ts | 22 ------- src/utils/settingsValidation/types.ts | 2 - 11 files changed, 5 insertions(+), 130 deletions(-) delete mode 100644 src/utils/settingsValidation/localhost/__tests__/index.spec.ts delete mode 100644 src/utils/settingsValidation/localhost/builtin.ts delete mode 100644 src/utils/settingsValidation/localhost/pluggable.ts diff --git a/CHANGES.txt b/CHANGES.txt index d2b60b08..c1852500 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -11,6 +11,7 @@ - Removed the migration logic for the old format of MySegments keys in LocalStorage introduced in JavaScript SDK v10.17.3. - Removed the `sdkClientMethodCSWithTT` function, which handled the logic to bound an optional traffic type to SDK clients. Client-side SDK implementations must use `sdkClientMethodCS` module, which, unlike the previous function, does not allow passing a traffic type but simplifies the SDK API. - Removed internal ponyfills for `Map` and `Set` global objects, dropping support for IE and other outdated browsers. The SDK now requires the runtime environment to support these features natively or to provide a polyfill. + - Removed the `sync.localhostMode` configuration option to plug the LocalhostMode module. 1.17.0 (September 6, 2024) - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. diff --git a/src/logger/constants.ts b/src/logger/constants.ts index 293def7e..520a5707 100644 --- a/src/logger/constants.ts +++ b/src/logger/constants.ts @@ -109,7 +109,6 @@ export const ERROR_EMPTY = 319; export const ERROR_EMPTY_ARRAY = 320; export const ERROR_INVALID_CONFIG_PARAM = 321; export const ERROR_HTTP = 322; -export const ERROR_LOCALHOST_MODULE_REQUIRED = 323; export const ERROR_STORAGE_INVALID = 324; export const ERROR_NOT_BOOLEAN = 325; export const ERROR_MIN_CONFIG_PARAM = 326; diff --git a/src/logger/messages/error.ts b/src/logger/messages/error.ts index ca02daf0..2c0b0c63 100644 --- a/src/logger/messages/error.ts +++ b/src/logger/messages/error.ts @@ -32,7 +32,6 @@ export const codesError: [number, string][] = [ [c.ERROR_NOT_BOOLEAN, '%s: provided param must be a boolean value.'], // initialization / settings validation [c.ERROR_INVALID_CONFIG_PARAM, c.LOG_PREFIX_SETTINGS + ': you passed an invalid "%s" config param. It should be one of the following values: %s. Defaulting to "%s".'], - [c.ERROR_LOCALHOST_MODULE_REQUIRED, c.LOG_PREFIX_SETTINGS + ': an invalid value was received for "sync.localhostMode" config. 
A valid entity should be provided for localhost mode.'], [c.ERROR_STORAGE_INVALID, c.LOG_PREFIX_SETTINGS+': the provided storage is invalid.%s Falling back into default MEMORY storage'], [c.ERROR_MIN_CONFIG_PARAM, c.LOG_PREFIX_SETTINGS + ': the provided "%s" config param is lower than allowed. Setting to the minimum value %s seconds'], [c.ERROR_TOO_MANY_SETS, c.LOG_PREFIX_SETTINGS + ': the amount of flag sets provided are big causing uri length error.'], diff --git a/src/sync/offline/LocalhostFromObject.ts b/src/sync/offline/LocalhostFromObject.ts index b823e7ee..bd3d2209 100644 --- a/src/sync/offline/LocalhostFromObject.ts +++ b/src/sync/offline/LocalhostFromObject.ts @@ -1,12 +1,6 @@ import { splitsParserFromSettingsFactory } from './splitsParser/splitsParserFromSettings'; import { syncManagerOfflineFactory } from './syncManagerOffline'; -import { SplitIO } from '../../types'; -// Singleton instance of the factory function for offline SyncManager from object (a.k.a. localhostFromObject) +// Singleton instance of the factory function for offline SyncManager from object // SDK instances instantiate their SyncManagers with the same factory -const localhostFromObject = syncManagerOfflineFactory(splitsParserFromSettingsFactory) as SplitIO.LocalhostFactory; -localhostFromObject.type = 'LocalhostFromObject'; - -export function LocalhostFromObject(): SplitIO.LocalhostFactory { - return localhostFromObject; -} +export const localhostFromObjectFactory = syncManagerOfflineFactory(splitsParserFromSettingsFactory); diff --git a/src/types.ts b/src/types.ts index 3647b804..d221df57 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,11 +1,8 @@ +/* eslint-disable no-use-before-define */ import { ISplitFiltersValidation } from './dtos/types'; import { IIntegration, IIntegrationFactoryParams } from './integrations/types'; import { ILogger } from './logger/types'; -import { ISdkFactoryContext } from './sdkFactory/types'; -/* eslint-disable no-use-before-define */ - import { IStorageFactoryParams, IStorageSync, IStorageAsync, IStorageSyncFactory, IStorageAsyncFactory } from './storages/types'; -import { ISyncManagerCS } from './sync/types'; /** * Reduced version of NodeJS.EventEmitter interface with the minimal methods used by the SDK @@ -116,7 +113,6 @@ export interface ISettings { splitFilters: SplitIO.SplitFilter[], impressionsMode: SplitIO.ImpressionsMode, __splitFiltersValidation: ISplitFiltersValidation, - localhostMode?: SplitIO.LocalhostFactory, enabled: boolean, flagSpecVersion: string, requestOptions?: { @@ -666,13 +662,6 @@ export namespace SplitIO { * @typedef {Promise} SplitNamesAsync */ export type SplitNamesAsync = Promise; - /** - * Localhost mode factory. - */ - export type LocalhostFactory = { - type: 'LocalhostFromObject' | 'LocalhostFromFile' - (params: ISdkFactoryContext): ISyncManagerCS - } /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. 
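As an illustrative sketch of what this removal means for SDK packages built on top of this library (the `chooseSyncManagerFactory` helper, its non-localhost branch, and the import paths are assumptions; only `localhostFromObjectFactory` and `ISettings` come from this repository), an SDK entry point can now select the offline SyncManager factory directly instead of reading it from the removed `sync.localhostMode` option:

import type { ISettings } from './types';
import { localhostFromObjectFactory } from './sync/offline/LocalhostFromObject';

// Hypothetical selector: with `sync.localhostMode` gone, the entry point decides which
// SyncManager factory to use based on the validated `mode` setting.
function chooseSyncManagerFactory(settings: ISettings) {
  if (settings.mode === 'localhost') return localhostFromObjectFactory;
  return undefined; // the regular online SyncManager factory would be returned here
}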
diff --git a/src/utils/settingsValidation/__tests__/settings.mocks.ts b/src/utils/settingsValidation/__tests__/settings.mocks.ts index d4e29e51..a2a3fb14 100644 --- a/src/utils/settingsValidation/__tests__/settings.mocks.ts +++ b/src/utils/settingsValidation/__tests__/settings.mocks.ts @@ -1,7 +1,6 @@ import { InMemoryStorageCSFactory } from '../../../storages/inMemory/InMemoryStorageCS'; import { ISettings } from '../../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { LocalhostFromObject } from '../../../sync/offline/LocalhostFromObject'; export const settingsWithKey = { core: { @@ -62,7 +61,6 @@ export const fullSettings: ISettings = { sync: { splitFilters: [], impressionsMode: 'OPTIMIZED', - localhostMode: LocalhostFromObject(), __splitFiltersValidation: { validFilters: [], queryString: null, diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index 6438f512..98883911 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ -80,7 +80,6 @@ export const base = { splitFilters: undefined, // impressions collection mode impressionsMode: OPTIMIZED, - localhostMode: undefined, enabled: true, flagSpecVersion: FLAG_SPEC_VERSION }, @@ -102,7 +101,7 @@ function fromSecondsToMillis(n: number) { */ export function settingsValidation(config: unknown, validationParams: ISettingsValidationParams) { - const { defaults, runtime, storage, integrations, logger, localhost, consent, flagSpec } = validationParams; + const { defaults, runtime, storage, integrations, logger, consent, flagSpec } = validationParams; // creates a settings object merging base, defaults and config objects. const withDefaults = merge({}, base, defaults, config) as ISettings; @@ -180,8 +179,6 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV // @ts-ignore, modify readonly prop if (integrations) withDefaults.integrations = integrations(withDefaults); - if (localhost) sync.localhostMode = localhost(withDefaults); - // validate push options if (withDefaults.streamingEnabled !== false) { // @ts-ignore, modify readonly prop withDefaults.streamingEnabled = true; diff --git a/src/utils/settingsValidation/localhost/__tests__/index.spec.ts b/src/utils/settingsValidation/localhost/__tests__/index.spec.ts deleted file mode 100644 index 663530dc..00000000 --- a/src/utils/settingsValidation/localhost/__tests__/index.spec.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { validateLocalhost } from '../pluggable'; -import { validateLocalhostWithDefault } from '../builtin'; -import { LocalhostFromObject } from '../../../../sync/offline/LocalhostFromObject'; -import { loggerMock as log } from '../../../../logger/__tests__/sdkLogger.mock'; - -const localhostModeObject = LocalhostFromObject(); - -describe('validateLocalhost, for slim SplitFactory', () => { - - afterEach(() => { - log.error.mockClear(); - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is invalid or not provided, returns undefined and logs an error', () => { - expect(validateLocalhost({ log, sync: {}, mode: 'localhost' })).toBe(undefined); - expect(validateLocalhost({ log, sync: { localhostMode: null }, mode: 'localhost' })).toBe(undefined); - expect(validateLocalhost({ log, sync: { localhostMode: () => { } }, mode: 'localhost' })).toBe(undefined); - expect(log.error).toBeCalledTimes(3); // logs error if provided object is invalid - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is valid, returns the 
provided object', () => { - expect(validateLocalhost({ log, sync: { localhostMode: localhostModeObject }, mode: 'localhost' })).toBe(localhostModeObject); - expect(log.error).not.toBeCalled(); - }); - - test('if mode is not LOCALHOST_MODE, returns the provided object (it is not validated)', () => { - expect(validateLocalhost({ log, sync: {}, mode: 'standalone' })).toBe(undefined); - expect(validateLocalhost({ log, sync: { localhostMode: 'INVALID_BUT_IGNORED' }, mode: 'standalone' })).toBe('INVALID_BUT_IGNORED'); - expect(log.error).not.toBeCalled(); - }); - -}); - -describe('validateLocalhostWithDefault, for full SplitFactory', () => { - - afterEach(() => { - log.error.mockClear(); - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is not provided, returns default without logging an error', () => { - expect(validateLocalhostWithDefault({ log, sync: {}, mode: 'localhost' })).toBe(localhostModeObject); - expect(validateLocalhostWithDefault({ log, sync: { localhostMode: null }, mode: 'localhost' })).toBe(localhostModeObject); - expect(log.error).not.toBeCalled(); - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is invalid, returns default and logs an error', () => { - expect(validateLocalhostWithDefault({ log, sync: { localhostMode: () => { } }, mode: 'localhost' })).toBe(localhostModeObject); - expect(log.error).toBeCalledTimes(1); // logs error if provided object is invalid - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is valid, returns the provided object', () => { - expect(validateLocalhostWithDefault({ log, sync: { localhostMode: localhostModeObject }, mode: 'localhost' })).toBe(localhostModeObject); - expect(log.error).not.toBeCalled(); - }); - - test('if mode is not LOCALHOST_MODE, returns the provided object or the default one. 
Provided object is not validated and so no errors are logged', () => { - expect(validateLocalhostWithDefault({ log, sync: {}, mode: 'standalone' })).toBe(localhostModeObject); - expect(validateLocalhostWithDefault({ log, sync: { localhostMode: 'INVALID_BUT_IGNORED' }, mode: 'standalone' })).toBe('INVALID_BUT_IGNORED'); - expect(log.error).not.toBeCalled(); - }); - -}); diff --git a/src/utils/settingsValidation/localhost/builtin.ts b/src/utils/settingsValidation/localhost/builtin.ts deleted file mode 100644 index 35a30145..00000000 --- a/src/utils/settingsValidation/localhost/builtin.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { ILogger } from '../../../logger/types'; -import { SDKMode, } from '../../../types'; -import { LocalhostFromObject } from '../../../sync/offline/LocalhostFromObject'; -import { validateLocalhost } from './pluggable'; - -/** - * This function validates `settings.sync.localhostMode` object - * - * @param {any} settings config object provided by the user to initialize the sdk - * - * @returns {Object} provided localhost mode module at `settings.sync.localhostMode` if valid, or a default LocalhostFromObject instance if not provided or invalid - */ -export function validateLocalhostWithDefault(settings: { log: ILogger, sync: { localhostMode?: any }, mode: SDKMode }) { - if (!settings.sync.localhostMode) return LocalhostFromObject(); - return validateLocalhost(settings) || LocalhostFromObject(); -} diff --git a/src/utils/settingsValidation/localhost/pluggable.ts b/src/utils/settingsValidation/localhost/pluggable.ts deleted file mode 100644 index 3231e612..00000000 --- a/src/utils/settingsValidation/localhost/pluggable.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { ERROR_LOCALHOST_MODULE_REQUIRED } from '../../../logger/constants'; -import { ILogger } from '../../../logger/types'; -import { SDKMode, } from '../../../types'; -import { LOCALHOST_MODE } from '../../constants'; - -/** - * This function validates `settings.sync.localhostMode` object - * - * @param {any} settings config object provided by the user to initialize the sdk - * - * @returns {Object | undefined} provided localhost mode module at `settings.sync.localhostMode`, or undefined if it is not provided or invalid - */ -export function validateLocalhost(settings: { log: ILogger, sync: { localhostMode?: any}, mode: SDKMode }) { - const localhostMode = settings.sync.localhostMode; - - // localhostMode.type is used for internal validation. Not considered part of the public API, and might be updated eventually. - if (settings.mode === LOCALHOST_MODE && (typeof localhostMode !== 'function' || localhostMode.type !== 'LocalhostFromObject')) { - settings.log.error(ERROR_LOCALHOST_MODULE_REQUIRED); - return undefined; - } - return localhostMode; -} diff --git a/src/utils/settingsValidation/types.ts b/src/utils/settingsValidation/types.ts index 32145836..7ced1b33 100644 --- a/src/utils/settingsValidation/types.ts +++ b/src/utils/settingsValidation/types.ts @@ -20,8 +20,6 @@ export interface ISettingsValidationParams { integrations?: (settings: ISettings) => ISettings['integrations'], /** Logger validator (`settings.debug`) */ logger: (settings: ISettings) => ISettings['log'], - /** Localhost mode validator (`settings.sync.localhostMode`) */ - localhost?: (settings: ISettings) => ISettings['sync']['localhostMode'], /** User consent validator (`settings.userConsent`) */ consent?: (settings: ISettings) => ISettings['userConsent'], /** Flag spec version validation. 
Configurable by the JS Synchronizer but not by the SDKs */ From 35953b753b08df2a0b64bf0bd7f51b50283491e0 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 25 Oct 2024 12:40:00 -0300 Subject: [PATCH 120/146] 1st step: move SplitIO namespace definition from Browser SDK to JS-Commons --- .gitignore | 1 - package.json | 2 +- temp.ts | 25 + types/index.d.ts | 5 + types/splitio.d.ts | 1553 ++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 1584 insertions(+), 2 deletions(-) create mode 100644 temp.ts create mode 100644 types/index.d.ts create mode 100644 types/splitio.d.ts diff --git a/.gitignore b/.gitignore index b09085de..34d8005c 100644 --- a/.gitignore +++ b/.gitignore @@ -9,7 +9,6 @@ ## transpiled code /esm /cjs -/types ## coverage info /coverage diff --git a/package.json b/package.json index 870e561c..f85509e2 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ "check:lint": "eslint src --ext .js,.ts", "check:types": "tsc --noEmit", "build": "npm run build:cjs && npm run build:esm", - "build:esm": "rimraf esm && tsc -m es2015 --outDir esm -d true --declarationDir types", + "build:esm": "rimraf esm && tsc -m es2015 --outDir esm", "build:cjs": "rimraf cjs && tsc -m CommonJS --outDir cjs", "test": "jest", "test:coverage": "jest --coverage", diff --git a/temp.ts b/temp.ts new file mode 100644 index 00000000..859fa873 --- /dev/null +++ b/temp.ts @@ -0,0 +1,25 @@ + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, + * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. + * + * @property getHeaderOverrides + * @default undefined + * + * @param context - The context for the request. + * @param context.headers - The current headers in the request. + * @returns A set of headers to be merged with the current headers. + * + * @example + * const getHeaderOverrides = (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * }; + */ + getHeaderOverrides?: (context: { headers: Record }) => Record \ No newline at end of file diff --git a/types/index.d.ts b/types/index.d.ts new file mode 100644 index 00000000..714efbf1 --- /dev/null +++ b/types/index.d.ts @@ -0,0 +1,5 @@ +// Declaration file for JavaScript Browser Split Software SDK +// Project: http://www.split.io/ +// Definitions by: Nico Zelaya + +/// diff --git a/types/splitio.d.ts b/types/splitio.d.ts new file mode 100644 index 00000000..d8840bd4 --- /dev/null +++ b/types/splitio.d.ts @@ -0,0 +1,1553 @@ +// Type definitions for JavaScript Browser Split Software SDK +// Project: http://www.split.io/ +// Definitions by: Nico Zelaya + +export as namespace SplitIO; +export = SplitIO; + +/** + * EventEmitter interface based on a subset of the NodeJS.EventEmitter methods. 
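+ *
+ * @example
+ * // Illustrative only: `client` is an assumed SDK client instance, which implements this interface.
+ * client.once('init::ready', () => { console.log('SDK is ready'); });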
+ */ +interface IEventEmitter { + addListener(event: string, listener: (...args: any[]) => void): this + on(event: string, listener: (...args: any[]) => void): this + once(event: string, listener: (...args: any[]) => void): this + removeListener(event: string, listener: (...args: any[]) => void): this + off(event: string, listener: (...args: any[]) => void): this + removeAllListeners(event?: string): this + emit(event: string, ...args: any[]): boolean +} +/** + * @typedef {Object} EventConsts + * @property {string} SDK_READY The ready event. + * @property {string} SDK_READY_FROM_CACHE The ready event when fired with cached data. + * @property {string} SDK_READY_TIMED_OUT The timeout event. + * @property {string} SDK_UPDATE The update event. + */ +type EventConsts = { + SDK_READY: 'init::ready', + SDK_READY_FROM_CACHE: 'init::cache-ready', + SDK_READY_TIMED_OUT: 'init::timeout', + SDK_UPDATE: 'state::update' +}; +/** + * SDK Modes. + * @typedef {string} SDKMode + */ +type SDKMode = 'standalone' | 'localhost' | 'consumer' | 'consumer_partial'; +/** + * Storage types. + * @typedef {string} StorageType + */ +type StorageType = 'MEMORY' | 'LOCALSTORAGE'; +/** + * Settings interface. This is a representation of the settings the SDK expose, that's why + * most of it's props are readonly. Only features should be rewritten when localhost mode is active. + * @interface ISettings + */ +interface ISettings { + readonly core: { + authorizationKey: string, + key: SplitIO.SplitKey, + labelsEnabled: boolean, + IPAddressesEnabled: boolean + }, + readonly mode: SDKMode, + readonly scheduler: { + featuresRefreshRate: number, + impressionsRefreshRate: number, + impressionsQueueSize: number, + telemetryRefreshRate: number, + segmentsRefreshRate: number, + offlineRefreshRate: number, + eventsPushRate: number, + eventsQueueSize: number, + pushRetryBackoffBase: number + }, + readonly startup: { + readyTimeout: number, + requestTimeoutBeforeReady: number, + retriesOnFailureBeforeReady: number, + eventsFirstPushWindow: number + }, + readonly storage?: SplitIO.StorageSyncFactory | SplitIO.StorageAsyncFactory, + readonly urls: { + events: string, + sdk: string, + auth: string, + streaming: string, + telemetry: string + }, + readonly integrations?: SplitIO.IntegrationFactory[], + readonly debug: boolean | LogLevel | SplitIO.ILogger, + readonly version: string, + /** + * Mocked features map. + */ + features?: SplitIO.MockedFeaturesMap, + readonly streamingEnabled: boolean, + readonly sync: { + splitFilters: SplitIO.SplitFilter[], + impressionsMode: SplitIO.ImpressionsMode, + enabled: boolean, + flagSpecVersion: string, + requestOptions?: { + getHeaderOverrides?: (context: { headers: Record }) => Record + }, + }, + readonly userConsent: SplitIO.ConsentStatus +} +/** + * Log levels. + * @typedef {string} LogLevel + */ +type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'; +/** + * Logger API + * @interface ILoggerAPI + */ +interface ILoggerAPI { + /** + * Enables SDK logging to the console. + * @function enable + * @returns {void} + */ + enable(): void, + /** + * Disables SDK logging. + * @function disable + * @returns {void} + */ + disable(): void, + /** + * Sets a log level for the SDK logs. + * @function setLogLevel + * @returns {void} + */ + setLogLevel(logLevel: LogLevel): void, + /** + * Log level constants. Use this to pass them to setLogLevel function. 
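+   *
+   * @example
+   * // Illustrative only: `factory` is an assumed SDK factory instance exposing the Logger API.
+   * factory.Logger.setLogLevel(factory.Logger.LogLevel.WARN);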
+ */ + LogLevel: { + [level in LogLevel]: LogLevel + } +} +/** + * User consent API + * @interface IUserConsentAPI + */ +interface IUserConsentAPI { + /** + * Sets or updates the user consent status. Possible values are `true` and `false`, which represent user consent `'GRANTED'` and `'DECLINED'` respectively. + * - `true ('GRANTED')`: the user has granted consent for tracking events and impressions. The SDK will send them to Split cloud. + * - `false ('DECLINED')`: the user has declined consent for tracking events and impressions. The SDK will not send them to Split cloud. + * + * NOTE: calling this method updates the user consent at a factory level, affecting all clients of the same factory. + * + * @function setStatus + * @param {boolean} userConsent The user consent status, true for 'GRANTED' and false for 'DECLINED'. + * @returns {boolean} Whether the provided param is a valid value (i.e., a boolean value) or not. + */ + setStatus(userConsent: boolean): boolean; + /** + * Gets the user consent status. + * + * @function getStatus + * @returns {ConsentStatus} The user consent status. + */ + getStatus(): SplitIO.ConsentStatus; + /** + * Consent status constants. Use this to compare with the getStatus function result. + */ + Status: { + [status in SplitIO.ConsentStatus]: SplitIO.ConsentStatus + } +} +/** + * Common settings between Browser and NodeJS settings interface. + * @interface ISharedSettings + */ +interface ISharedSettings { + /** + * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. + * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging} + * + * Examples: + * ```typescript + * config.debug = true + * config.debug = 'WARN' + * config.debug = ErrorLogger() + * ``` + * @property {boolean | LogLevel | ILogger} debug + * @default false + */ + debug?: boolean | LogLevel | SplitIO.ILogger, + /** + * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, + * which will check for the logImpression method. + * @property {IImpressionListener} impressionListener + * @default undefined + */ + impressionListener?: SplitIO.IImpressionListener, + /** + * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, + * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. + * @property {boolean} streamingEnabled + * @default true + */ + streamingEnabled?: boolean, + /** + * SDK synchronization settings. + * @property {Object} sync + */ + sync?: { + /** + * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. + * This configuration is only meaningful when the SDK is working in "standalone" mode. + * + * Example: + * `splitFilter: [ + * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' + * ]` + * @property {SplitIO.SplitFilter[]} splitFilters + */ + splitFilters?: SplitIO.SplitFilter[] + /** + * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. 
+ * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. + * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). + * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. + * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. + * + * @property {String} impressionsMode + * @default 'OPTIMIZED' + */ + impressionsMode?: SplitIO.ImpressionsMode, + /** + * Controls the SDK continuous synchronization flags. + * + * When `true` a running SDK will process rollout plan updates performed on the UI (default). + * When false it'll just fetch all data upon init. + * + * @property {boolean} enabled + * @default true + */ + enabled?: boolean + /** + * Custom options object for HTTP(S) requests in the Browser. + * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. + * This configuration has no effect in "consumer" mode, as no HTTP(S) requests are made by the SDK. + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, + * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. + * + * @property getHeaderOverrides + * @default undefined + * + * @param context - The context for the request. + * @param context.headers - The current headers in the request. + * @returns A set of headers to be merged with the current headers. + * + * @example + * const getHeaderOverrides = (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * }; + */ + getHeaderOverrides?: (context: { headers: Record }) => Record + }, + } +} +/** + * Common API for entities that expose status handlers. + * @interface IStatusInterface + * @extends IEventEmitter + */ +interface IStatusInterface extends IEventEmitter { + /** + * Constant object containing the SDK events for you to use. + * @property {EventConsts} Event + */ + Event: EventConsts, + /** + * Returns a promise that resolves once the SDK has finished loading (`SDK_READY` event emitted) or rejected if the SDK has timedout (`SDK_READY_TIMED_OUT` event emitted). + * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, the `ready` method will return a resolved promise once the SDK is ready. + * + * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises. 
+ * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example: + * ``` + * try { + * await client.ready().catch((e) => { throw e; }); + * // SDK is ready + * } catch(e) { + * // SDK has timedout + * } + * ``` + * + * @function ready + * @returns {Promise} + */ + ready(): Promise +} +/** + * Common definitions between clients for different environments interface. + * @interface IBasicClient + * @extends IStatusInterface + */ +interface IBasicClient extends IStatusInterface { + /** + * Destroys the client instance. + * + * In 'standalone' and 'partial consumer' modes, this method will flush any pending impressions and events. + * In 'standalone' mode, it also stops the synchronization of feature flag definitions with the backend. + * In 'consumer' and 'partial consumer' modes, this method also disconnects the SDK from the Pluggable storage. + * + * @function destroy + * @returns {Promise} A promise that resolves once the client is destroyed. + */ + destroy(): Promise +} +/** + * Common definitions between SDK instances for different environments interface. + * @interface IBasicSDK + */ +interface IBasicSDK { + /** + * Current settings of the SDK instance. + * @property settings + */ + settings: ISettings, + /** + * Logger API. + * @property Logger + */ + Logger: ILoggerAPI, + /** + * User consent API. + * @property UserConsent + */ + UserConsent: IUserConsentAPI, + /** + * Destroys all the clients created by this factory. + * @function destroy + * @returns {Promise} + */ + destroy(): Promise +} +/****** Exposed namespace ******/ +/** + * Types and interfaces for @splitsoftware/splitio-browserjs package for usage when integrating javascript browser sdk on typescript apps. + * For the SDK package information see {@link https://www.npmjs.com/package/@splitsoftware/splitio-browserjs} + */ +declare namespace SplitIO { + /** + * Feature flag treatment value, returned by getTreatment. + * @typedef {string} Treatment + */ + type Treatment = string; + /** + * Feature flag treatment promise that resolves to actual treatment value. + * @typedef {Promise} AsyncTreatment + */ + type AsyncTreatment = Promise; + /** + * An object with the treatments for a bulk of feature flags, returned by getTreatments. For example: + * { + * feature1: 'on', + * feature2: 'off + * } + * @typedef {Object.} Treatments + */ + type Treatments = { + [featureName: string]: Treatment + }; + /** + * Feature flags treatments promise that resolves to the actual SplitIO.Treatments object. + * @typedef {Promise} AsyncTreatments + */ + type AsyncTreatments = Promise; + /** + * Feature flag evaluation result with treatment and configuration, returned by getTreatmentWithConfig. + * @typedef {Object} TreatmentWithConfig + * @property {string} treatment The treatment string + * @property {string | null} config The stringified version of the JSON config defined for that treatment, null if there is no config for the resulting treatment. + */ + type TreatmentWithConfig = { + treatment: string, + config: string | null + }; + /** + * Feature flag treatment promise that resolves to actual treatment with config value. + * @typedef {Promise} AsyncTreatmentWithConfig + */ + type AsyncTreatmentWithConfig = Promise; + /** + * An object with the treatments with configs for a bulk of feature flags, returned by getTreatmentsWithConfig. + * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. 
For example: + * { + * feature1: { treatment: 'on', config: null } + * feature2: { treatment: 'off', config: '{"bannerText":"Click here."}' } + * } + * @typedef {Object.} Treatments + */ + type TreatmentsWithConfig = { + [featureName: string]: TreatmentWithConfig + }; + /** + * Feature flags treatments promise that resolves to the actual SplitIO.TreatmentsWithConfig object. + * @typedef {Promise} AsyncTreatmentsWithConfig + */ + type AsyncTreatmentsWithConfig = Promise; + /** + * Possible Split SDK events. + * @typedef {string} Event + */ + type Event = 'init::timeout' | 'init::ready' | 'init::cache-ready' | 'state::update'; + /** + * Attributes should be on object with values of type string or number (dates should be sent as millis since epoch). + * @typedef {Object.} Attributes + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#attribute-syntax} + */ + type Attributes = { + [attributeName: string]: AttributeType + }; + /** + * Type of an attribute value + * @typedef {string | number | boolean | Array} AttributeType + */ + type AttributeType = string | number | boolean | Array; + /** + * Properties should be an object with values of type string, number, boolean or null. Size limit of ~31kb. + * @typedef {Object.} Attributes + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#track + */ + type Properties = { + [propertyName: string]: string | number | boolean | null + }; + /** + * The customer identifier represented by a string. + * @typedef {string} SplitKey + */ + type SplitKey = string; + /** + * Path to file with mocked features (for node). + * @typedef {string} MockedFeaturesFilePath + */ + type MockedFeaturesFilePath = string; + /** + * Object with mocked features mapping (for browser). We need to specify the featureName as key, and the mocked treatment as value. + * @typedef {Object} MockedFeaturesMap + */ + type MockedFeaturesMap = { + [featureName: string]: string | TreatmentWithConfig + }; + /** + * Object with information about an impression. It contains the generated impression DTO as well as + * complementary information around where and how it was generated in that way. + * @typedef {Object} ImpressionData + */ + type ImpressionData = { + impression: { + feature: string, + keyName: string, + treatment: string, + time: number, + bucketingKey?: string, + label: string, + changeNumber: number, + pt?: number, + }, + attributes?: SplitIO.Attributes, + ip: string, + hostname: string, + sdkLanguageVersion: string + }; + /** + * Data corresponding to one feature flag view. + * @typedef {Object} SplitView + */ + type SplitView = { + /** + * The name of the feature flag. + * @property {string} name + */ + name: string, + /** + * The traffic type of the feature flag. + * @property {string} trafficType + */ + trafficType: string, + /** + * Whether the feature flag is killed or not. + * @property {boolean} killed + */ + killed: boolean, + /** + * The list of treatments available for the feature flag. + * @property {Array} treatments + */ + treatments: Array, + /** + * Current change number of the feature flag. + * @property {number} changeNumber + */ + changeNumber: number, + /** + * Map of configurations per treatment. + * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. + * @property {Object.} configs + */ + configs: { + [treatmentName: string]: string + }, + /** + * List of sets of the feature flag. 
+ * @property {string[]} sets + */ + sets: string[], + /** + * The default treatment of the feature flag. + * @property {string} defaultTreatment + */ + defaultTreatment: string, + }; + /** + * A promise that resolves to a feature flag view. + * @typedef {Promise} SplitView + */ + type SplitViewAsync = Promise; + /** + * An array containing the SplitIO.SplitView elements. + */ + type SplitViews = Array; + /** + * A promise that resolves to an SplitIO.SplitViews array. + * @typedef {Promise} SplitViewsAsync + */ + type SplitViewsAsync = Promise; + /** + * An array of feature flag names. + * @typedef {Array} SplitNames + */ + type SplitNames = Array; + /** + * A promise that resolves to an array of feature flag names. + * @typedef {Promise} SplitNamesAsync + */ + type SplitNamesAsync = Promise; + /** + * Storage for synchronous (standalone) SDK. + * Its interface details are not part of the public API. + */ + type StorageSync = {}; + /** + * Storage builder for synchronous (standalone) SDK. + * By returning undefined, the SDK will use the default IN MEMORY storage. + * Input parameter details are not part of the public API. + */ + type StorageSyncFactory = { + readonly type: StorageType + (params: {}): (StorageSync | undefined) + } + /** + * Configuration params for `InLocalStorage` + */ + type InLocalStorageOptions = { + /** + * Optional prefix to prevent any kind of data collision when having multiple factories using the same storage type. + * @property {string} prefix + * @default 'SPLITIO' + */ + prefix?: string + } + /** + * Storage for asynchronous (consumer) SDK. + * Its interface details are not part of the public API. + */ + type StorageAsync = {} + /** + * Storage builder for asynchronous (consumer) SDK. + * Input parameter details are not part of the public API. + */ + type StorageAsyncFactory = { + readonly type: 'PLUGGABLE' + (params: {}): StorageAsync + } + /** + * Configuration params for `PluggableStorage` + */ + type PluggableStorageOptions = { + /** + * Optional prefix to prevent any kind of data collision when having multiple factories using the same storage wrapper. + * @property {string} prefix + * @default 'SPLITIO' + */ + prefix?: string, + /** + * Storage wrapper. + * @property {Object} wrapper + */ + wrapper: Object + } + /** + * Impression listener interface. This is the interface that needs to be implemented + * by the element you provide to the SDK as impression listener. + * @interface IImpressionListener + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#listener} + */ + interface IImpressionListener { + logImpression(data: SplitIO.ImpressionData): void + } + /** + * SDK integration instance. + * Its interface details are not part of the public API. + */ + type Integration = {}; + /** + * SDK integration factory. + * By returning an integration, the SDK will queue events and impressions into it. + * Input parameter details are not part of the public API. + */ + type IntegrationFactory = { + readonly type: string + (params: {}): (Integration | void) + } + /** + * A pair of user key and it's trafficType, required for tracking valid Split events. + * @typedef {Object} Identity + * @property {string} key The user key. + * @property {string} trafficType The key traffic type. + */ + type Identity = { + key: string; + trafficType: string; + }; + /** + * Object with information about a Split event. 
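+   *
+   * @example
+   * // Illustrative shape only; the field values below are assumptions.
+   * // { eventTypeId: 'page_load_time', value: 83.3, trafficTypeName: 'user', key: 'user-key', timestamp: 1690000000000 }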
+ * @typedef {Object} EventData + */ + type EventData = { + eventTypeId: string; + value?: number; + properties?: Properties; + trafficTypeName?: string; + key?: string; + timestamp?: number; + }; + /** + * Object representing the data sent by Split (events and impressions). + * @typedef {Object} IntegrationData + * @property {string} type The type of Split data, either 'IMPRESSION' or 'EVENT'. + * @property {ImpressionData | EventData} payload The data instance itself. + */ + type IntegrationData = { type: 'IMPRESSION', payload: SplitIO.ImpressionData } | { type: 'EVENT', payload: SplitIO.EventData }; + /** + * Available URL settings for the SDKs. + */ + type UrlSettings = { + /** + * String property to override the base URL where the SDK will get rollout plan related data, like feature flags and segments definitions. + * @property {string} sdk + * @default 'https://sdk.split.io/api' + */ + sdk?: string, + /** + * String property to override the base URL where the SDK will post event-related information like impressions. + * @property {string} events + * @default 'https://events.split.io/api' + */ + events?: string, + /** + * String property to override the base URL where the SDK will get authorization tokens to be used with functionality that requires it, like streaming. + * @property {string} auth + * @default 'https://auth.split.io/api' + */ + auth?: string, + /** + * String property to override the base URL where the SDK will connect to receive streaming updates. + * @property {string} streaming + * @default 'https://streaming.split.io' + */ + streaming?: string, + /** + * String property to override the base URL where the SDK will post telemetry data. + * @property {string} telemetry + * @default 'https://telemetry.split.io/api' + */ + telemetry?: string + }; + + /** + * SplitFilter type. + * + * @typedef {string} SplitFilterType + */ + type SplitFilterType = 'bySet' | 'byName' | 'byPrefix'; + /** + * Defines a feature flag filter, described by a type and list of values. + */ + interface SplitFilter { + /** + * Type of the filter. + * + * @property {SplitFilterType} type + */ + type: SplitFilterType, + /** + * List of values: feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. + * + * @property {string[]} values + */ + values: string[], + } + /** + * ImpressionsMode type + * @typedef {string} ImpressionsMode + */ + type ImpressionsMode = 'OPTIMIZED' | 'DEBUG' | 'NONE'; + /** + * User consent status. + * @typedef {string} ConsentStatus + */ + type ConsentStatus = 'GRANTED' | 'DECLINED' | 'UNKNOWN'; + /** + * Logger + * Its interface details are not part of the public API. It shouldn't be used directly. + * @interface ILogger + */ + interface ILogger { + setLogLevel(logLevel: LogLevel): void + } + /** + * Common settings interface for SDK instances created on the browser. + * @interface IBrowserBasicSettings + * @extends ISharedSettings + */ + interface IBrowserBasicSettings extends ISharedSettings { + /** + * SDK Core settings for the browser. + * @property {Object} core + */ + core: { + /** + * Your SDK key. + * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} + * @property {string} authorizationKey + */ + authorizationKey: string, + /** + * Customer identifier. Whatever this means to you. + * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @property {SplitKey} key + */ + key: SplitKey, + /** + * Disable labels from being sent to Split backend. 
Labels may contain sensitive information. + * @property {boolean} labelsEnabled + * @default true + */ + labelsEnabled?: boolean + }, + /** + * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. + * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. + * @property {Object} urls + */ + urls?: UrlSettings, + /** + * Defines an optional list of factory functions used to instantiate SDK integrations. + * + * NOTE: at the moment there are not integrations to plug in Browser SDK. + * + * @property {Object} integrations + */ + integrations?: IntegrationFactory[], + /** + * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. + * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. + * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. + * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends + * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. + * + * @typedef {string} userConsent + * @default 'GRANTED' + */ + userConsent?: ConsentStatus + } + /** + * Settings interface for SDK instances created on the browser. + * @interface IBrowserSettings + * @extends ISharedSettings + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} + */ + interface IBrowserSettings extends IBrowserBasicSettings { + /** + * The SDK mode. When using the default in memory storage or `InLocalStorage` as storage, the only possible value is "standalone", which is the default. + * For "localhost" mode, use "localhost" as authorizationKey. + * + * @property {'standalone'} mode + * @default 'standalone' + */ + mode?: 'standalone', + /** + * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode} + */ + features?: MockedFeaturesMap, + /** + * Defines the factory function to instantiate the storage. If not provided, the default IN MEMORY storage is used. + * + * Example: + * ```typescript + * SplitFactory({ + * ... + * storage: InLocalStorage() + * }) + * ``` + * @property {Object} storage + */ + storage?: StorageSyncFactory, + /** + * SDK Startup settings for the Browser. + * @property {Object} startup + */ + startup?: { + /** + * Maximum amount of time used before notify a timeout. + * @property {number} readyTimeout + * @default 1.5 + */ + readyTimeout?: number, + /** + * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. + * @property {number} requestTimeoutBeforeReady + * @default 1.5 + */ + requestTimeoutBeforeReady?: number, + /** + * How many quick retries we will do while starting up the SDK. 
+ * @property {number} retriesOnFailureBeforeReady + * @default 1 + */ + retriesOnFailureBeforeReady?: number, + /** + * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, + * to better control on browsers. This number defines that window before the first events push. + * + * @property {number} eventsFirstPushWindow + * @default 10 + */ + eventsFirstPushWindow?: number, + }, + /** + * SDK scheduler settings. + * @property {Object} scheduler + */ + scheduler?: { + /** + * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. + * @property {number} featuresRefreshRate + * @default 60 + */ + featuresRefreshRate?: number, + /** + * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. + * @property {number} impressionsRefreshRate + * @default 60 + */ + impressionsRefreshRate?: number, + /** + * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * @property {number} impressionsQueueSize + * @default 30000 + */ + impressionsQueueSize?: number, + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * @property {number} telemetryRefreshRate + * @default 3600 + */ + telemetryRefreshRate?: number, + /** + * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. + * @property {number} segmentsRefreshRate + * @default 60 + */ + segmentsRefreshRate?: number, + /** + * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. + * @property {number} eventsPushRate + * @default 60 + */ + eventsPushRate?: number, + /** + * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * @property {number} eventsQueueSize + * @default 500 + */ + eventsQueueSize?: number, + /** + * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. + * For more information see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode} + * @property {number} offlineRefreshRate + * @default 15 + */ + offlineRefreshRate?: number, + /** + * When using streaming mode, seconds to wait before re attempting to connect for push notifications. + * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... + * @property {number} pushRetryBackoffBase + * @default 1 + */ + pushRetryBackoffBase?: number, + } + } + /** + * Settings interface with async storage for SDK instances created on the browser. + * If your storage is synchronous (by defaut we use memory, which is sync) use SplitIO.IBrowserSettings instead. + * @interface IBrowserAsyncSettings + * @extends IBrowserBasicSettings + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} + */ + interface IBrowserAsyncSettings extends IBrowserBasicSettings { + /** + * The SDK mode. When using `PluggableStorage` as storage, the possible values are "consumer" and "consumer_partial". 
+ * + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage} + * + * @property {'consumer' | 'consumer_partial'} mode + */ + mode: 'consumer' | 'consumer_partial', + /** + * Defines the factory function to instantiate the storage. + * + * Example: + * ```typescript + * SplitFactory({ + * ... + * storage: PluggableStorage({ wrapper: SomeWrapper }) + * }) + * ``` + * @property {Object} storage + */ + storage: StorageAsyncFactory, + /** + * SDK Startup settings for the Browser. + * @property {Object} startup + */ + startup?: { + /** + * Maximum amount of time used before notify a timeout. + * @property {number} readyTimeout + * @default 1.5 + */ + readyTimeout?: number, + /** + * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, + * to better control on browsers. This number defines that window before the first events push. + * + * NOTE: this param is ignored in 'consumer' mode. + * @property {number} eventsFirstPushWindow + * @default 10 + */ + eventsFirstPushWindow?: number, + }, + /** + * SDK scheduler settings. + * @property {Object} scheduler + */ + scheduler?: { + /** + * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. + * + * NOTE: this param is ignored in 'consumer' mode. + * @property {number} impressionsRefreshRate + * @default 60 + */ + impressionsRefreshRate?: number, + /** + * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * @property {number} impressionsQueueSize + * @default 30000 + */ + impressionsQueueSize?: number, + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * @property {number} telemetryRefreshRate + * @default 3600 + */ + telemetryRefreshRate?: number, + /** + * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. + * + * NOTE: this param is ignored in 'consumer' mode. + * @property {number} eventsPushRate + * @default 60 + */ + eventsPushRate?: number, + /** + * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * NOTE: this param is ignored in 'consumer' mode. + * @property {number} eventsQueueSize + * @default 500 + */ + eventsQueueSize?: number, + } + } + /** + * This represents the interface for the SDK instance with synchronous storage and client-side API, + * i.e., where client instances have a bound user key. + * @interface ISDK + * @extends IBasicSDK + */ + interface ISDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK, associated with the key provided on settings. + * @function client + * @returns {IClient} The client instance. + */ + client(): IClient, + /** + * Returns a shared client of the SDK, associated with the given key. + * @function client + * @param {SplitKey} key The key for the new client instance. + * @returns {IClient} The client instance. + */ + client(key: SplitKey): IClient, + /** + * Returns a manager instance of the SDK to explore available information. + * @function manager + * @returns {IManager} The manager instance. 
+ */ + manager(): IManager + } + /** + * This represents the interface for the SDK instance with asynchronous storage and client-side API, + * i.e., where client instances have a bound user key. + * @interface IAsyncSDK + * @extends IBasicSDK + */ + interface IAsyncSDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK, associated with the key provided on settings. + * @function client + * @returns {IAsyncClient} The asynchronous client instance. + */ + client(): IAsyncClient, + /** + * Returns a shared client of the SDK, associated with the given key. + * @function client + * @param {SplitKey} key The key for the new client instance. + * @returns {IAsyncClient} The asynchronous client instance. + */ + client(key: SplitKey): IAsyncClient, + /** + * Returns a manager instance of the SDK to explore available information. + * @function manager + * @returns {IManager} The manager instance. + */ + manager(): IAsyncManager + } + /** + * This represents the interface for the Client instance with synchronous storage for server-side SDK, where we don't have only one key. + * @interface IClient + * @extends IBasicClient + */ + interface IClientSS extends IBasicClient { + /** + * Returns a Treatment value, which is the treatment string for the given feature. + * @function getTreatment + * @param {string} key - The string key representing the consumer. + * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {Treatment} The treatment string. + */ + getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment, + /** + * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. + * @function getTreatmentWithConfig + * @param {string} key - The string key representing the consumer. + * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the + * configuration stringified JSON (or null if there was no config for that treatment). + */ + getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, + /** + * Returns a Treatments value, which is an object map with the treatments for the given features. + * @function getTreatments + * @param {string} key - The string key representing the consumer. + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {Treatments} The treatments object map. + */ + getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments, + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * @function getTreatmentsWithConfig + * @param {string} key - The string key representing the consumer. + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. 
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, + /** + * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * @function getTreatmentsByFlagSet + * @param {string} key - The string key representing the consumer. + * @param {string} flagSet - The flag set name we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {Treatments} The map with all the Treatment objects + */ + getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments, + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. + * @function getTreatmentsWithConfigByFlagSet + * @param {string} key - The string key representing the consumer. + * @param {string} flagSet - The flag set name we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig, + /** + * Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. + * @function getTreatmentsByFlagSets + * @param {string} key - The string key representing the consumer. + * @param {Array} flagSets - An array of the flag set names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {Treatments} The map with all the Treatment objects + */ + getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments, + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. + * @function getTreatmentsWithConfigByFlagSets + * @param {string} key - The string key representing the consumer. + * @param {Array} flagSets - An array of the flag set names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig, + /** + * Tracks an event to be fed to the results product on Split user interface. + * @function track + * @param {SplitKey} key - The key that identifies the entity related to this event. + * @param {string} trafficType - The traffic type of the entity related to this event. + * @param {string} eventType - The event type corresponding to this event. + * @param {number=} value - The value of this event. + * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. 
+ * @returns {boolean} Whether the event was added to the queue successfully or not. + */ + track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean, + } + /** + * This represents the interface for the Client instance with asynchronous storage for server-side SDK, where we don't have only one key. + * @interface IAsyncClient + * @extends IBasicClient + */ + interface IAsyncClientSS extends IBasicClient { + /** + * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. + * For usage on NodeJS as we don't have only one key. + * NOTE: Treatment will be a promise only in async storages, like REDIS. + * @function getTreatment + * @param {string} key - The string key representing the consumer. + * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. + */ + getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment, + /** + * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. + * For usage on NodeJS as we don't have only one key. + * NOTE: Treatment will be a promise only in async storages, like REDIS. + * @function getTreatmentWithConfig + * @param {string} key - The string key representing the consumer. + * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. + */ + getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig, + /** + * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. + * For usage on NodeJS as we don't have only one key. + * @function getTreatments + * @param {string} key - The string key representing the consumer. + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. + */ + getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments, + /** + * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * For usage on NodeJS as we don't have only one key. + * @function getTreatmentsWithConfig + * @param {string} key - The string key representing the consumer. + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. 
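For reference, a minimal usage sketch of the synchronous server-side client surface documented above (`IClientSS`). How the client is obtained, the key `user-123`, the flag `new-checkout`, the flag set `checkout` and the traffic type `user` are illustrative assumptions, not part of these typings:

```typescript
// Sketch only: in practice the client comes from a server-side SplitFactory instance.
declare const client: SplitIO.IClientSS;

// Server-side clients take the key on every call.
const treatment: SplitIO.Treatment = client.getTreatment('user-123', 'new-checkout');

// Evaluate every feature flag associated with a flag set for the same key.
const bySet: SplitIO.Treatments = client.getTreatmentsByFlagSet('user-123', 'checkout');

// Queue an event; the boolean tells whether it was accepted into the queue.
const queued: boolean = client.track('user-123', 'user', 'purchase', 49.99, { plan: 'gold' });
```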
+ */
+ getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig,
+ /**
+ * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set.
+ * For usage on NodeJS as we don't have only one key.
+ * @function getTreatmentsByFlagSet
+ * @param {string} key - The string key representing the consumer.
+ * @param {string} flagSet - The flag set name we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
+ */
+ getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments,
+ /**
+ * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set.
+ * For usage on NodeJS as we don't have only one key.
+ * @function getTreatmentsWithConfigByFlagSet
+ * @param {string} key - The string key representing the consumer.
+ * @param {string} flagSet - The flag set name we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects.
+ */
+ getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig,
+ /**
+ * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets.
+ * For usage on NodeJS as we don't have only one key.
+ * @function getTreatmentsByFlagSets
+ * @param {string} key - The string key representing the consumer.
+ * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
+ */
+ getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments,
+ /**
+ * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets.
+ * For usage on NodeJS as we don't have only one key.
+ * @function getTreatmentsWithConfigByFlagSets
+ * @param {string} key - The string key representing the consumer.
+ * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects.
+ */
+ getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig,
+ /**
+ * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not).
+ * @function track
+ * @param {SplitKey} key - The key that identifies the entity related to this event.
+ * @param {string} trafficType - The traffic type of the entity related to this event.
+ * @param {string} eventType - The event type corresponding to this event. + * @param {number=} value - The value of this event. + * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. + * @returns {Promise} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. + */ + track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise + } + /** + * This represents the interface for the Client instance with synchronous storage for client-side SDK, where each client has associated a key. + * @interface IClient + * @extends IBasicClient + */ + interface IClient extends IBasicClient { + /** + * Returns a Treatment value, which is the treatment string for the given feature. + * @function getTreatment + * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {Treatment} The treatment string. + */ + getTreatment(featureFlagName: string, attributes?: Attributes): Treatment, + /** + * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. + * @function getTreatmentWithConfig + * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {TreatmentWithConfig} The map containing the treatment and the configuration stringified JSON (or null if there was no config for that treatment). + */ + getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, + /** + * Returns a Treatments value, which is an object map with the treatments for the given features. + * @function getTreatments + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {Treatments} The treatments object map. + */ + getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments, + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * @function getTreatmentsWithConfig + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, + /** + * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * @function getTreatmentsByFlagSet + * @param {string} flagSet - The flag set name we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
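The asynchronous counterpart documented above (`IAsyncClientSS`) has the same surface, but every evaluation and `track` call goes through a promise because the storage is asynchronous (for example Redis). A hedged sketch, with illustrative key and flag names:

```typescript
// Sketch only: `client` would come from a factory configured in consumer mode.
declare const client: SplitIO.IAsyncClientSS;

async function evaluateCheckout(userKey: string) {
  // Evaluations resolve once the async storage answers.
  const treatment = await client.getTreatment(userKey, 'new-checkout');
  const withConfig = await client.getTreatmentWithConfig(userKey, 'new-checkout');
  const config = withConfig.config ? JSON.parse(withConfig.config) : null;

  // track resolves to a boolean indicating whether the event was queued.
  const queued = await client.track(userKey, 'user', 'checkout-started');
  return { treatment, config, queued };
}
```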
+ * @returns {Treatments} The map with all the Treatments objects
+ */
+ getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments,
+ /**
+ * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set.
+ * @function getTreatmentsWithConfigByFlagSet
+ * @param {string} flagSet - The flag set name we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects
+ */
+ getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig,
+ /**
+ * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets.
+ * @function getTreatmentsByFlagSets
+ * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {Treatments} The map with all the Treatments objects
+ */
+ getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments,
+ /**
+ * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets.
+ * @function getTreatmentsWithConfigByFlagSets
+ * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects
+ */
+ getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig,
+ /**
+ * Tracks an event to be fed to the results product on Split user interface.
+ * @function track
+ * @param {string} trafficType - The traffic type of the entity related to this event.
+ * @param {string} eventType - The event type corresponding to this event.
+ * @param {number=} value - The value of this event.
+ * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null.
+ * @returns {boolean} Whether the event was added to the queue successfully or not.
+ */
+ track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean,
+ /**
+ * Add an attribute to client's in memory attributes storage.
+ *
+ * @param {string} attributeName Attribute name
+ * @param {AttributeType} attributeValue Attribute value
+ * @returns {boolean} true if the attribute was stored and false otherwise
+ */
+ setAttribute(attributeName: string, attributeValue: AttributeType): boolean,
+ /**
+ * Returns the attribute with the given name.
+ *
+ * @param {string} attributeName Attribute name
+ * @returns {AttributeType} Attribute with the given name
+ */
+ getAttribute(attributeName: string): AttributeType,
+ /**
+ * Removes from client's in memory attributes storage the attribute with the given name.
+ *
+ * @param {string} attributeName
+ * @returns {boolean} true if attribute was removed and false otherwise
+ */
+ removeAttribute(attributeName: string): boolean,
+ /**
+ * Add to client's in memory attributes storage the attributes in 'attributes'.
+ * + * @param {Attributes} attributes Object with attributes to store + * @returns true if attributes were stored an false otherwise + */ + setAttributes(attributes: Attributes): boolean, + /** + * Return all the attributes stored in client's in memory attributes storage. + * + * @returns {Attributes} returns all the stored attributes + */ + getAttributes(): Attributes, + /** + * Remove all the stored attributes in the client's in memory attribute storage. + * + * @returns {boolean} true if all attribute were removed and false otherwise + */ + clearAttributes(): boolean + } + /** + * This represents the interface for the Client instance with asynchronous storage for client-side SDK, where each client has associated a key. + * @interface IAsyncClient + * @extends IBasicClient + */ + interface IAsyncClient extends IBasicClient { + /** + * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. + * @function getTreatment + * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. + */ + getTreatment(featureFlagName: string, attributes?: Attributes): AsyncTreatment, + /** + * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. + * @function getTreatmentWithConfig + * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. + */ + getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig, + /** + * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. + * @function getTreatments + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. + */ + getTreatments(featureFlagNames: string[], attributes?: Attributes): AsyncTreatments, + /** + * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * @function getTreatmentsWithConfig + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. + */ + getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig, + /** + * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * @function getTreatmentsByFlagSet + * @param {string} flagSet - The flag set name we want to get the treatments. 
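A usage sketch of the client-side client documented above (`IClient`), where the key is bound when the client is created, so evaluations omit it, and attributes can be stored on the client itself. Flag, flag set and attribute names are illustrative:

```typescript
// Sketch only: `client` is the instance returned by factory.client() or factory.client(key).
declare const client: SplitIO.IClient;

// Stored attributes are merged into every evaluation made by this client.
client.setAttributes({ plan: 'gold', registered: true });

const treatments: SplitIO.Treatments = client.getTreatments(['new-checkout', 'dark-mode']);
const frontendFlags: SplitIO.TreatmentsWithConfig = client.getTreatmentsWithConfigByFlagSet('frontend');

// Events also use the bound key, so only traffic type and event type are required.
client.track('user', 'page-view');

// Attributes can be read back or cleared at any time.
const stored: SplitIO.Attributes = client.getAttributes();
client.clearAttributes();
```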
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
+ */
+ getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatments,
+ /**
+ * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set.
+ * @function getTreatmentsWithConfigByFlagSet
+ * @param {string} flagSet - The flag set name we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object.
+ */
+ getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig,
+ /**
+ * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets.
+ * @function getTreatmentsByFlagSets
+ * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
+ */
+ getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatments,
+ /**
+ * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets.
+ * @function getTreatmentsWithConfigByFlagSets
+ * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
+ * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object.
+ */
+ getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig,
+ /**
+ * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not).
+ * @function track
+ * @param {string} trafficType - The traffic type of the entity related to this event.
+ * @param {string} eventType - The event type corresponding to this event.
+ * @param {number=} value - The value of this event.
+ * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null.
+ * @returns {Promise} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not.
+ */
+ track(trafficType: string, eventType: string, value?: number, properties?: Properties): Promise,
+ /**
+ * Add an attribute to client's in memory attributes storage.
+ *
+ * @param {string} attributeName Attribute name
+ * @param {AttributeType} attributeValue Attribute value
+ * @returns {boolean} true if the attribute was stored and false otherwise
+ */
+ setAttribute(attributeName: string, attributeValue: AttributeType): boolean,
+ /**
+ * Returns the attribute with the given name.
+ * + * @param {string} attributeName Attribute name + * @returns {AttributeType} Attribute with the given name + */ + getAttribute(attributeName: string): AttributeType, + /** + * Removes from client's in memory attributes storage the attribute with the given name. + * + * @param {string} attributeName + * @returns {boolean} true if attribute was removed and false otherwise + */ + removeAttribute(attributeName: string): boolean, + /** + * Add to client's in memory attributes storage the attributes in 'attributes'. + * + * @param {Attributes} attributes Object with attributes to store + * @returns true if attributes were stored an false otherwise + */ + setAttributes(attributes: Attributes): boolean, + /** + * Return all the attributes stored in client's in memory attributes storage. + * + * @returns {Attributes} returns all the stored attributes + */ + getAttributes(): Attributes, + /** + * Remove all the stored attributes in the client's in memory attribute storage. + * + * @returns {boolean} true if all attribute were removed and false otherwise + */ + clearAttributes(): boolean + } + /** + * Representation of a manager instance with synchronous storage of the SDK. + * @interface IManager + * @extends IStatusInterface + */ + interface IManager extends IStatusInterface { + /** + * Get the array of feature flag names. + * @function names + * @returns {SplitNames} The list of feature flag names. + */ + names(): SplitNames, + /** + * Get the array of feature flags data in SplitView format. + * @function splits + * @returns {SplitViews} The list of SplitIO.SplitView. + */ + splits(): SplitViews, + /** + * Get the data of a split in SplitView format. + * @function split + * @param {string} featureFlagName The name of the feature flag we want to get info of. + * @returns {SplitView} The SplitIO.SplitView of the given split. + */ + split(featureFlagName: string): SplitView, + } + /** + * Representation of a manager instance with asynchronous storage of the SDK. + * @interface IAsyncManager + * @extends IStatusInterface + */ + interface IAsyncManager extends IStatusInterface { + /** + * Get the array of feature flag names. + * @function names + * @returns {SplitNamesAsync} A promise that resolves to the list of feature flag names. + */ + names(): SplitNamesAsync, + /** + * Get the array of feature flags data in SplitView format. + * @function splits + * @returns {SplitViewsAsync} A promise that resolves to the SplitIO.SplitView list. + */ + splits(): SplitViewsAsync, + /** + * Get the data of a split in SplitView format. + * @function split + * @param {string} featureFlagName The name of the feature flag we want to get info of. + * @returns {SplitViewAsync} A promise that resolves to the SplitIO.SplitView value. 
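To round out the client and manager interfaces documented above, a sketch of the manager API in both its synchronous and asynchronous forms. The factory variables and the `ISDK` type name for the synchronous factory are assumptions here, and the flag name is illustrative:

```typescript
// Sketch only: managers are obtained from the corresponding factory instances.
declare const sdk: SplitIO.ISDK;
declare const asyncSdk: SplitIO.IAsyncSDK;

const manager: SplitIO.IManager = sdk.manager();
const names: SplitIO.SplitNames = manager.names();
const view: SplitIO.SplitView = manager.split('new-checkout');
console.log(names.length, view.treatments, view.defaultTreatment);

async function listFlagNames() {
  const asyncManager: SplitIO.IAsyncManager = asyncSdk.manager();
  const views: SplitIO.SplitViews = await asyncManager.splits();
  return views.map(v => v.name);
}
```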
+ */ + split(featureFlagName: string): SplitViewAsync, + } +} From b415e4314b57ea1b87021297b72811f4f5ab3b37 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 25 Oct 2024 12:40:56 -0300 Subject: [PATCH 121/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 8bc19544..08f93a03 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.1", + "version": "2.0.0-rc.3", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.1", + "version": "2.0.0-rc.3", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 870e561c..4be1b3eb 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.1", + "version": "2.0.0-rc.3", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From c600e3d1e4bb2f35d33e8bdc8ce0536871e61369 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 25 Oct 2024 13:26:21 -0300 Subject: [PATCH 122/146] 2nd step: move SplitIO namespace definition from React Native SDK to JS-Commons and merge --- types/splitio.d.ts | 59 +++++++++++++++++++++++----------------------- 1 file changed, 30 insertions(+), 29 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index d8840bd4..583dbba4 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1,6 +1,5 @@ -// Type definitions for JavaScript Browser Split Software SDK +// Type definitions for Split Software SDKs // Project: http://www.split.io/ -// Definitions by: Nico Zelaya export as namespace SplitIO; export = SplitIO; @@ -164,14 +163,14 @@ interface IUserConsentAPI { } } /** - * Common settings between Browser and NodeJS settings interface. + * Common settings interface between SDK settings interface. * @interface ISharedSettings */ interface ISharedSettings { /** * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging} + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging}. * * Examples: * ```typescript @@ -235,7 +234,7 @@ interface ISharedSettings { */ enabled?: boolean /** - * Custom options object for HTTP(S) requests in the Browser. + * Custom options object for HTTP(S) requests. * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. * This configuration has no effect in "consumer" mode, as no HTTP(S) requests are made by the SDK. */ @@ -346,8 +345,7 @@ interface IBasicSDK { } /****** Exposed namespace ******/ /** - * Types and interfaces for @splitsoftware/splitio-browserjs package for usage when integrating javascript browser sdk on typescript apps. - * For the SDK package information see {@link https://www.npmjs.com/package/@splitsoftware/splitio-browserjs} + * Shared types and interfaces for `@splitsoftware` packages for usage when integrating JavaScript SDKs with TypeScript. 
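Since the declaration file combines `export = SplitIO` with `export as namespace SplitIO`, consuming applications can use these shared types either as an ambient UMD namespace or through an explicit type import from whichever SDK package re-exposes them. A small sketch (the commented import specifier is an assumption, not a documented path):

```typescript
// Ambient usage, when the typings are available as the UMD global namespace `SplitIO`.
// import type SplitIO from '@splitsoftware/splitio-browserjs'; // illustrative alternative, path not guaranteed

const attributes: SplitIO.Attributes = { plan: 'gold', registered: true };

function describeResult(result: SplitIO.TreatmentWithConfig): string {
  // `config` is the stringified JSON attached to the treatment, or null when absent.
  return result.config === null ? result.treatment : `${result.treatment} (${result.config})`;
}
```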
*/ declare namespace SplitIO { /** @@ -445,7 +443,7 @@ declare namespace SplitIO { */ type MockedFeaturesFilePath = string; /** - * Object with mocked features mapping (for browser). We need to specify the featureName as key, and the mocked treatment as value. + * Object with mocked features mapping for client-side (e.g., Browser or React Native). We need to specify the featureName as key, and the mocked treatment as value. * @typedef {Object} MockedFeaturesMap */ type MockedFeaturesMap = { @@ -729,13 +727,14 @@ declare namespace SplitIO { setLogLevel(logLevel: LogLevel): void } /** - * Common settings interface for SDK instances created on the browser. - * @interface IBrowserBasicSettings + * Common settings interface for SDK instances created for client-side. + * + * @interface IClientSideBasicSettings * @extends ISharedSettings */ - interface IBrowserBasicSettings extends ISharedSettings { + interface IClientSideBasicSettings extends ISharedSettings { /** - * SDK Core settings for the browser. + * SDK Core settings for client-side. * @property {Object} core */ core: { @@ -767,7 +766,7 @@ declare namespace SplitIO { /** * Defines an optional list of factory functions used to instantiate SDK integrations. * - * NOTE: at the moment there are not integrations to plug in Browser SDK. + * NOTE: at the moment there are not integrations to plug in. * * @property {Object} integrations */ @@ -785,12 +784,13 @@ declare namespace SplitIO { userConsent?: ConsentStatus } /** - * Settings interface for SDK instances created on the browser. - * @interface IBrowserSettings - * @extends ISharedSettings + * Settings interface for SDK instances created for client-side with synchronous storage (e.g., Browser or React Native). + * + * @interface IClientSideSettings + * @extends IClientSideBasicSettings * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} */ - interface IBrowserSettings extends IBrowserBasicSettings { + interface IClientSideSettings extends IClientSideBasicSettings { /** * The SDK mode. When using the default in memory storage or `InLocalStorage` as storage, the only possible value is "standalone", which is the default. * For "localhost" mode, use "localhost" as authorizationKey. @@ -807,8 +807,10 @@ declare namespace SplitIO { /** * Defines the factory function to instantiate the storage. If not provided, the default IN MEMORY storage is used. * + * NOTE: at the moment there are not storages to plug in React Native SDK, only `InLocalStorage` for Browser SDK. + * * Example: - * ```typescript + * ``` * SplitFactory({ * ... * storage: InLocalStorage() @@ -818,7 +820,7 @@ declare namespace SplitIO { */ storage?: StorageSyncFactory, /** - * SDK Startup settings for the Browser. + * SDK Startup settings. * @property {Object} startup */ startup?: { @@ -842,7 +844,7 @@ declare namespace SplitIO { retriesOnFailureBeforeReady?: number, /** * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. + * to better control on browsers or mobile. This number defines that window before the first events push. * * @property {number} eventsFirstPushWindow * @default 10 @@ -915,13 +917,12 @@ declare namespace SplitIO { } } /** - * Settings interface with async storage for SDK instances created on the browser. 
- * If your storage is synchronous (by defaut we use memory, which is sync) use SplitIO.IBrowserSettings instead. - * @interface IBrowserAsyncSettings - * @extends IBrowserBasicSettings - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} + * Settings interface with async storage for SDK instances created for client-side (e.g., Serverless environments). + * If your storage is synchronous (by defaut we use memory, which is sync) use SplitIO.IClientSideSettings instead. + * @interface IClientSideAsyncSettings + * @extends IClientSideBasicSettings */ - interface IBrowserAsyncSettings extends IBrowserBasicSettings { + interface IClientSideAsyncSettings extends IClientSideBasicSettings { /** * The SDK mode. When using `PluggableStorage` as storage, the possible values are "consumer" and "consumer_partial". * @@ -944,7 +945,7 @@ declare namespace SplitIO { */ storage: StorageAsyncFactory, /** - * SDK Startup settings for the Browser. + * SDK Startup settings. * @property {Object} startup */ startup?: { @@ -956,7 +957,7 @@ declare namespace SplitIO { readyTimeout?: number, /** * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. + * to better control on browsers or mobile. This number defines that window before the first events push. * * NOTE: this param is ignored in 'consumer' mode. * @property {number} eventsFirstPushWindow @@ -1156,7 +1157,7 @@ declare namespace SplitIO { } /** * This represents the interface for the Client instance with asynchronous storage for server-side SDK, where we don't have only one key. - * @interface IAsyncClient + * @interface IAsyncClientSS * @extends IBasicClient */ interface IAsyncClientSS extends IBasicClient { From 6905ae6a3e88969649d0fbef270f9ee6f24d6b59 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 25 Oct 2024 15:48:48 -0300 Subject: [PATCH 123/146] Polishing --- types/splitio.d.ts | 623 ++++++++++++++++++++++----------------------- 1 file changed, 308 insertions(+), 315 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 583dbba4..fde0963e 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -8,13 +8,13 @@ export = SplitIO; * EventEmitter interface based on a subset of the NodeJS.EventEmitter methods. */ interface IEventEmitter { - addListener(event: string, listener: (...args: any[]) => void): this - on(event: string, listener: (...args: any[]) => void): this - once(event: string, listener: (...args: any[]) => void): this - removeListener(event: string, listener: (...args: any[]) => void): this - off(event: string, listener: (...args: any[]) => void): this - removeAllListeners(event?: string): this - emit(event: string, ...args: any[]): boolean + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; + off(event: string, listener: (...args: any[]) => void): this; + removeAllListeners(event?: string): this; + emit(event: string, ...args: any[]): boolean; } /** * @typedef {Object} EventConsts @@ -24,10 +24,10 @@ interface IEventEmitter { * @property {string} SDK_UPDATE The update event. 
*/ type EventConsts = { - SDK_READY: 'init::ready', - SDK_READY_FROM_CACHE: 'init::cache-ready', - SDK_READY_TIMED_OUT: 'init::timeout', - SDK_UPDATE: 'state::update' + SDK_READY: 'init::ready'; + SDK_READY_FROM_CACHE: 'init::cache-ready'; + SDK_READY_TIMED_OUT: 'init::timeout'; + SDK_UPDATE: 'state::update'; }; /** * SDK Modes. @@ -46,55 +46,55 @@ type StorageType = 'MEMORY' | 'LOCALSTORAGE'; */ interface ISettings { readonly core: { - authorizationKey: string, - key: SplitIO.SplitKey, - labelsEnabled: boolean, - IPAddressesEnabled: boolean - }, - readonly mode: SDKMode, + authorizationKey: string; + key: SplitIO.SplitKey; + labelsEnabled: boolean; + IPAddressesEnabled: boolean; + }; + readonly mode: SDKMode; readonly scheduler: { - featuresRefreshRate: number, - impressionsRefreshRate: number, - impressionsQueueSize: number, - telemetryRefreshRate: number, - segmentsRefreshRate: number, - offlineRefreshRate: number, - eventsPushRate: number, - eventsQueueSize: number, - pushRetryBackoffBase: number - }, + featuresRefreshRate: number; + impressionsRefreshRate: number; + impressionsQueueSize: number; + telemetryRefreshRate: number; + segmentsRefreshRate: number; + offlineRefreshRate: number; + eventsPushRate: number; + eventsQueueSize: number; + pushRetryBackoffBase: number; + }; readonly startup: { - readyTimeout: number, - requestTimeoutBeforeReady: number, - retriesOnFailureBeforeReady: number, - eventsFirstPushWindow: number - }, - readonly storage?: SplitIO.StorageSyncFactory | SplitIO.StorageAsyncFactory, + readyTimeout: number; + requestTimeoutBeforeReady: number; + retriesOnFailureBeforeReady: number; + eventsFirstPushWindow: number; + }; + readonly storage?: SplitIO.StorageSyncFactory | SplitIO.StorageAsyncFactory; readonly urls: { - events: string, - sdk: string, - auth: string, - streaming: string, - telemetry: string - }, - readonly integrations?: SplitIO.IntegrationFactory[], - readonly debug: boolean | LogLevel | SplitIO.ILogger, - readonly version: string, + events: string; + sdk: string; + auth: string; + streaming: string; + telemetry: string; + }; + readonly integrations?: SplitIO.IntegrationFactory[]; + readonly debug: boolean | LogLevel | SplitIO.ILogger; + readonly version: string; /** * Mocked features map. */ - features?: SplitIO.MockedFeaturesMap, - readonly streamingEnabled: boolean, + features?: SplitIO.MockedFeaturesMap; + readonly streamingEnabled: boolean; readonly sync: { - splitFilters: SplitIO.SplitFilter[], - impressionsMode: SplitIO.ImpressionsMode, - enabled: boolean, - flagSpecVersion: string, + splitFilters: SplitIO.SplitFilter[]; + impressionsMode: SplitIO.ImpressionsMode; + enabled: boolean; + flagSpecVersion: string; requestOptions?: { - getHeaderOverrides?: (context: { headers: Record }) => Record - }, - }, - readonly userConsent: SplitIO.ConsentStatus + getHeaderOverrides?: (context: { headers: Record }) => Record; + }; + }; + readonly userConsent: SplitIO.ConsentStatus; } /** * Log levels. @@ -111,25 +111,25 @@ interface ILoggerAPI { * @function enable * @returns {void} */ - enable(): void, + enable(): void; /** * Disables SDK logging. * @function disable * @returns {void} */ - disable(): void, + disable(): void; /** * Sets a log level for the SDK logs. * @function setLogLevel * @returns {void} */ - setLogLevel(logLevel: LogLevel): void, + setLogLevel(logLevel: LogLevel): void; /** * Log level constants. Use this to pass them to setLogLevel function. 
*/ LogLevel: { - [level in LogLevel]: LogLevel - } + [level in LogLevel]: LogLevel; + }; } /** * User consent API @@ -159,113 +159,8 @@ interface IUserConsentAPI { * Consent status constants. Use this to compare with the getStatus function result. */ Status: { - [status in SplitIO.ConsentStatus]: SplitIO.ConsentStatus - } -} -/** - * Common settings interface between SDK settings interface. - * @interface ISharedSettings - */ -interface ISharedSettings { - /** - * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. - * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging}. - * - * Examples: - * ```typescript - * config.debug = true - * config.debug = 'WARN' - * config.debug = ErrorLogger() - * ``` - * @property {boolean | LogLevel | ILogger} debug - * @default false - */ - debug?: boolean | LogLevel | SplitIO.ILogger, - /** - * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, - * which will check for the logImpression method. - * @property {IImpressionListener} impressionListener - * @default undefined - */ - impressionListener?: SplitIO.IImpressionListener, - /** - * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, - * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. - * @property {boolean} streamingEnabled - * @default true - */ - streamingEnabled?: boolean, - /** - * SDK synchronization settings. - * @property {Object} sync - */ - sync?: { - /** - * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. - * This configuration is only meaningful when the SDK is working in "standalone" mode. - * - * Example: - * `splitFilter: [ - * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' - * ]` - * @property {SplitIO.SplitFilter[]} splitFilters - */ - splitFilters?: SplitIO.SplitFilter[] - /** - * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. - * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. - * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). - * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. - * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. - * - * @property {String} impressionsMode - * @default 'OPTIMIZED' - */ - impressionsMode?: SplitIO.ImpressionsMode, - /** - * Controls the SDK continuous synchronization flags. - * - * When `true` a running SDK will process rollout plan updates performed on the UI (default). - * When false it'll just fetch all data upon init. - * - * @property {boolean} enabled - * @default true - */ - enabled?: boolean - /** - * Custom options object for HTTP(S) requests. 
- * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. - * This configuration has no effect in "consumer" mode, as no HTTP(S) requests are made by the SDK. - */ - requestOptions?: { - /** - * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. - * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. - * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` - * - * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, - * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. - * - * @property getHeaderOverrides - * @default undefined - * - * @param context - The context for the request. - * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. - * - * @example - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; - */ - getHeaderOverrides?: (context: { headers: Record }) => Record - }, - } + [status in SplitIO.ConsentStatus]: SplitIO.ConsentStatus; + }; } /** * Common API for entities that expose status handlers. @@ -277,7 +172,7 @@ interface IStatusInterface extends IEventEmitter { * Constant object containing the SDK events for you to use. * @property {EventConsts} Event */ - Event: EventConsts, + Event: EventConsts; /** * Returns a promise that resolves once the SDK has finished loading (`SDK_READY` event emitted) or rejected if the SDK has timedout (`SDK_READY_TIMED_OUT` event emitted). * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, the `ready` method will return a resolved promise once the SDK is ready. @@ -296,7 +191,7 @@ interface IStatusInterface extends IEventEmitter { * @function ready * @returns {Promise} */ - ready(): Promise + ready(): Promise; } /** * Common definitions between clients for different environments interface. @@ -314,7 +209,7 @@ interface IBasicClient extends IStatusInterface { * @function destroy * @returns {Promise} A promise that resolves once the client is destroyed. */ - destroy(): Promise + destroy(): Promise; } /** * Common definitions between SDK instances for different environments interface. @@ -325,23 +220,23 @@ interface IBasicSDK { * Current settings of the SDK instance. * @property settings */ - settings: ISettings, + settings: ISettings; /** * Logger API. * @property Logger */ - Logger: ILoggerAPI, + Logger: ILoggerAPI; /** * User consent API. * @property UserConsent */ - UserConsent: IUserConsentAPI, + UserConsent: IUserConsentAPI; /** * Destroys all the clients created by this factory. 
* @function destroy * @returns {Promise} */ - destroy(): Promise + destroy(): Promise; } /****** Exposed namespace ******/ /** @@ -367,7 +262,7 @@ declare namespace SplitIO { * @typedef {Object.} Treatments */ type Treatments = { - [featureName: string]: Treatment + [featureName: string]: Treatment; }; /** * Feature flags treatments promise that resolves to the actual SplitIO.Treatments object. @@ -381,8 +276,8 @@ declare namespace SplitIO { * @property {string | null} config The stringified version of the JSON config defined for that treatment, null if there is no config for the resulting treatment. */ type TreatmentWithConfig = { - treatment: string, - config: string | null + treatment: string; + config: string | null; }; /** * Feature flag treatment promise that resolves to actual treatment with config value. @@ -399,7 +294,7 @@ declare namespace SplitIO { * @typedef {Object.} Treatments */ type TreatmentsWithConfig = { - [featureName: string]: TreatmentWithConfig + [featureName: string]: TreatmentWithConfig; }; /** * Feature flags treatments promise that resolves to the actual SplitIO.TreatmentsWithConfig object. @@ -417,7 +312,7 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#attribute-syntax} */ type Attributes = { - [attributeName: string]: AttributeType + [attributeName: string]: AttributeType; }; /** * Type of an attribute value @@ -430,7 +325,7 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#track */ type Properties = { - [propertyName: string]: string | number | boolean | null + [propertyName: string]: string | number | boolean | null; }; /** * The customer identifier represented by a string. @@ -447,7 +342,7 @@ declare namespace SplitIO { * @typedef {Object} MockedFeaturesMap */ type MockedFeaturesMap = { - [featureName: string]: string | TreatmentWithConfig + [featureName: string]: string | TreatmentWithConfig; }; /** * Object with information about an impression. It contains the generated impression DTO as well as @@ -456,19 +351,19 @@ declare namespace SplitIO { */ type ImpressionData = { impression: { - feature: string, - keyName: string, - treatment: string, - time: number, - bucketingKey?: string, - label: string, - changeNumber: number, - pt?: number, - }, - attributes?: SplitIO.Attributes, - ip: string, - hostname: string, - sdkLanguageVersion: string + feature: string; + keyName: string; + treatment: string; + time: number; + bucketingKey?: string; + label: string; + changeNumber: number; + pt?: number; + }; + attributes?: SplitIO.Attributes; + ip: string; + hostname: string; + sdkLanguageVersion: string; }; /** * Data corresponding to one feature flag view. @@ -479,45 +374,45 @@ declare namespace SplitIO { * The name of the feature flag. * @property {string} name */ - name: string, + name: string; /** * The traffic type of the feature flag. * @property {string} trafficType */ - trafficType: string, + trafficType: string; /** * Whether the feature flag is killed or not. * @property {boolean} killed */ - killed: boolean, + killed: boolean; /** * The list of treatments available for the feature flag. * @property {Array} treatments */ - treatments: Array, + treatments: Array; /** * Current change number of the feature flag. * @property {number} changeNumber */ - changeNumber: number, + changeNumber: number; /** * Map of configurations per treatment. 
* Each existing configuration is a stringified version of the JSON you defined on the Split user interface. * @property {Object.} configs */ configs: { - [treatmentName: string]: string - }, + [treatmentName: string]: string; + }; /** * List of sets of the feature flag. * @property {string[]} sets */ - sets: string[], + sets: string[]; /** * The default treatment of the feature flag. * @property {string} defaultTreatment */ - defaultTreatment: string, + defaultTreatment: string; }; /** * A promise that resolves to a feature flag view. @@ -554,8 +449,8 @@ declare namespace SplitIO { * Input parameter details are not part of the public API. */ type StorageSyncFactory = { - readonly type: StorageType - (params: {}): (StorageSync | undefined) + readonly type: StorageType; + (params: {}): (StorageSync | undefined); } /** * Configuration params for `InLocalStorage` @@ -566,7 +461,7 @@ declare namespace SplitIO { * @property {string} prefix * @default 'SPLITIO' */ - prefix?: string + prefix?: string; } /** * Storage for asynchronous (consumer) SDK. @@ -578,8 +473,8 @@ declare namespace SplitIO { * Input parameter details are not part of the public API. */ type StorageAsyncFactory = { - readonly type: 'PLUGGABLE' - (params: {}): StorageAsync + readonly type: 'PLUGGABLE'; + (params: {}): StorageAsync; } /** * Configuration params for `PluggableStorage` @@ -590,12 +485,12 @@ declare namespace SplitIO { * @property {string} prefix * @default 'SPLITIO' */ - prefix?: string, + prefix?: string; /** * Storage wrapper. * @property {Object} wrapper */ - wrapper: Object + wrapper: Object; } /** * Impression listener interface. This is the interface that needs to be implemented @@ -604,7 +499,7 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#listener} */ interface IImpressionListener { - logImpression(data: SplitIO.ImpressionData): void + logImpression(data: SplitIO.ImpressionData): void; } /** * SDK integration instance. @@ -617,8 +512,8 @@ declare namespace SplitIO { * Input parameter details are not part of the public API. */ type IntegrationFactory = { - readonly type: string - (params: {}): (Integration | void) + readonly type: string; + (params: {}): (Integration | void); } /** * A pair of user key and it's trafficType, required for tracking valid Split events. @@ -658,31 +553,31 @@ declare namespace SplitIO { * @property {string} sdk * @default 'https://sdk.split.io/api' */ - sdk?: string, + sdk?: string; /** * String property to override the base URL where the SDK will post event-related information like impressions. * @property {string} events * @default 'https://events.split.io/api' */ - events?: string, + events?: string; /** * String property to override the base URL where the SDK will get authorization tokens to be used with functionality that requires it, like streaming. * @property {string} auth * @default 'https://auth.split.io/api' */ - auth?: string, + auth?: string; /** * String property to override the base URL where the SDK will connect to receive streaming updates. * @property {string} streaming * @default 'https://streaming.split.io' */ - streaming?: string, + streaming?: string; /** * String property to override the base URL where the SDK will post telemetry data. 
* @property {string} telemetry * @default 'https://telemetry.split.io/api' */ - telemetry?: string + telemetry?: string; }; /** @@ -700,13 +595,13 @@ declare namespace SplitIO { * * @property {SplitFilterType} type */ - type: SplitFilterType, + type: SplitFilterType; /** * List of values: feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. * * @property {string[]} values */ - values: string[], + values: string[]; } /** * ImpressionsMode type @@ -724,15 +619,14 @@ declare namespace SplitIO { * @interface ILogger */ interface ILogger { - setLogLevel(logLevel: LogLevel): void + setLogLevel(logLevel: LogLevel): void; } /** * Common settings interface for SDK instances created for client-side. * * @interface IClientSideBasicSettings - * @extends ISharedSettings */ - interface IClientSideBasicSettings extends ISharedSettings { + interface IClientSideBasicSettings { /** * SDK Core settings for client-side. * @property {Object} core @@ -743,26 +637,125 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} * @property {string} authorizationKey */ - authorizationKey: string, + authorizationKey: string; /** * Customer identifier. Whatever this means to you. * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} * @property {SplitKey} key */ - key: SplitKey, + key: SplitKey; /** * Disable labels from being sent to Split backend. Labels may contain sensitive information. * @property {boolean} labelsEnabled * @default true */ - labelsEnabled?: boolean - }, + labelsEnabled?: boolean; + }; + /** + * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. + * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging}. + * + * Examples: + * ``` + * config.debug = true + * config.debug = 'WARN' + * config.debug = ErrorLogger() + * ``` + * @property {boolean | LogLevel | ILogger} debug + * @default false + */ + debug?: boolean | LogLevel | SplitIO.ILogger; + /** + * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, + * which will check for the logImpression method. + * @property {IImpressionListener} impressionListener + * @default undefined + */ + impressionListener?: SplitIO.IImpressionListener; + /** + * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, + * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. + * @property {boolean} streamingEnabled + * @default true + */ + streamingEnabled?: boolean; + /** + * SDK synchronization settings. + * @property {Object} sync + */ + sync?: { + /** + * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. + * This configuration is only meaningful when the SDK is working in "standalone" mode. 
+ * + * Example: + * `splitFilter: [ + * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' + * ]` + * @property {SplitIO.SplitFilter[]} splitFilters + */ + splitFilters?: SplitIO.SplitFilter[] + /** + * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. + * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. + * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). + * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. + * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. + * + * @property {string} impressionsMode + * @default 'OPTIMIZED' + */ + impressionsMode?: SplitIO.ImpressionsMode; + /** + * Controls the SDK continuous synchronization flags. + * + * When `true` a running SDK will process rollout plan updates performed on the UI (default). + * When false it'll just fetch all data upon init. + * + * @property {boolean} enabled + * @default true + */ + enabled?: boolean + /** + * Custom options object for HTTP(S) requests. + * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. + * This configuration has no effect in "consumer" mode, as no HTTP(S) requests are made by the SDK. + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, + * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. + * + * @property getHeaderOverrides + * @default undefined + * + * @param context - The context for the request. + * @param context.headers - The current headers in the request. + * @returns A set of headers to be merged with the current headers. + * + * @example + * const getHeaderOverrides = (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * }; + */ + getHeaderOverrides?: (context: { headers: Record }) => Record; + }; + }; /** * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. * @property {Object} urls */ - urls?: UrlSettings, + urls?: UrlSettings; /** * Defines an optional list of factory functions used to instantiate SDK integrations. * @@ -770,7 +763,7 @@ declare namespace SplitIO { * * @property {Object} integrations */ - integrations?: IntegrationFactory[], + integrations?: IntegrationFactory[]; /** * User consent status. 
Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. @@ -781,7 +774,7 @@ declare namespace SplitIO { * @typedef {string} userConsent * @default 'GRANTED' */ - userConsent?: ConsentStatus + userConsent?: ConsentStatus; } /** * Settings interface for SDK instances created for client-side with synchronous storage (e.g., Browser or React Native). @@ -798,12 +791,12 @@ declare namespace SplitIO { * @property {'standalone'} mode * @default 'standalone' */ - mode?: 'standalone', + mode?: 'standalone'; /** * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode} */ - features?: MockedFeaturesMap, + features?: MockedFeaturesMap; /** * Defines the factory function to instantiate the storage. If not provided, the default IN MEMORY storage is used. * @@ -818,7 +811,7 @@ declare namespace SplitIO { * ``` * @property {Object} storage */ - storage?: StorageSyncFactory, + storage?: StorageSyncFactory; /** * SDK Startup settings. * @property {Object} startup @@ -829,19 +822,19 @@ declare namespace SplitIO { * @property {number} readyTimeout * @default 1.5 */ - readyTimeout?: number, + readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady * @default 1.5 */ - requestTimeoutBeforeReady?: number, + requestTimeoutBeforeReady?: number; /** * How many quick retries we will do while starting up the SDK. * @property {number} retriesOnFailureBeforeReady * @default 1 */ - retriesOnFailureBeforeReady?: number, + retriesOnFailureBeforeReady?: number; /** * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, * to better control on browsers or mobile. This number defines that window before the first events push. @@ -849,8 +842,8 @@ declare namespace SplitIO { * @property {number} eventsFirstPushWindow * @default 10 */ - eventsFirstPushWindow?: number, - }, + eventsFirstPushWindow?: number; + }; /** * SDK scheduler settings. * @property {Object} scheduler @@ -861,59 +854,59 @@ declare namespace SplitIO { * @property {number} featuresRefreshRate * @default 60 */ - featuresRefreshRate?: number, + featuresRefreshRate?: number; /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. * @property {number} impressionsRefreshRate * @default 60 */ - impressionsRefreshRate?: number, + impressionsRefreshRate?: number; /** * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. * @property {number} impressionsQueueSize * @default 30000 */ - impressionsQueueSize?: number, + impressionsQueueSize?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. 
* @property {number} telemetryRefreshRate * @default 3600 */ - telemetryRefreshRate?: number, + telemetryRefreshRate?: number; /** * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. * @property {number} segmentsRefreshRate * @default 60 */ - segmentsRefreshRate?: number, + segmentsRefreshRate?: number; /** * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. * @property {number} eventsPushRate * @default 60 */ - eventsPushRate?: number, + eventsPushRate?: number; /** * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. * @property {number} eventsQueueSize * @default 500 */ - eventsQueueSize?: number, + eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. * For more information see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode} * @property {number} offlineRefreshRate * @default 15 */ - offlineRefreshRate?: number, + offlineRefreshRate?: number; /** * When using streaming mode, seconds to wait before re attempting to connect for push notifications. * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... * @property {number} pushRetryBackoffBase * @default 1 */ - pushRetryBackoffBase?: number, + pushRetryBackoffBase?: number; } } /** @@ -930,12 +923,12 @@ declare namespace SplitIO { * * @property {'consumer' | 'consumer_partial'} mode */ - mode: 'consumer' | 'consumer_partial', + mode: 'consumer' | 'consumer_partial'; /** * Defines the factory function to instantiate the storage. * * Example: - * ```typescript + * ``` * SplitFactory({ * ... * storage: PluggableStorage({ wrapper: SomeWrapper }) @@ -943,7 +936,7 @@ declare namespace SplitIO { * ``` * @property {Object} storage */ - storage: StorageAsyncFactory, + storage: StorageAsyncFactory; /** * SDK Startup settings. * @property {Object} startup @@ -954,7 +947,7 @@ declare namespace SplitIO { * @property {number} readyTimeout * @default 1.5 */ - readyTimeout?: number, + readyTimeout?: number; /** * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, * to better control on browsers or mobile. This number defines that window before the first events push. @@ -963,8 +956,8 @@ declare namespace SplitIO { * @property {number} eventsFirstPushWindow * @default 10 */ - eventsFirstPushWindow?: number, - }, + eventsFirstPushWindow?: number; + }; /** * SDK scheduler settings. * @property {Object} scheduler @@ -977,20 +970,20 @@ declare namespace SplitIO { * @property {number} impressionsRefreshRate * @default 60 */ - impressionsRefreshRate?: number, + impressionsRefreshRate?: number; /** * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. * @property {number} impressionsQueueSize * @default 30000 */ - impressionsQueueSize?: number, + impressionsQueueSize?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. 
* @property {number} telemetryRefreshRate * @default 3600 */ - telemetryRefreshRate?: number, + telemetryRefreshRate?: number; /** * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. * @@ -998,7 +991,7 @@ declare namespace SplitIO { * @property {number} eventsPushRate * @default 60 */ - eventsPushRate?: number, + eventsPushRate?: number; /** * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. @@ -1007,7 +1000,7 @@ declare namespace SplitIO { * @property {number} eventsQueueSize * @default 500 */ - eventsQueueSize?: number, + eventsQueueSize?: number; } } /** @@ -1022,20 +1015,20 @@ declare namespace SplitIO { * @function client * @returns {IClient} The client instance. */ - client(): IClient, + client(): IClient; /** * Returns a shared client of the SDK, associated with the given key. * @function client * @param {SplitKey} key The key for the new client instance. * @returns {IClient} The client instance. */ - client(key: SplitKey): IClient, + client(key: SplitKey): IClient; /** * Returns a manager instance of the SDK to explore available information. * @function manager * @returns {IManager} The manager instance. */ - manager(): IManager + manager(): IManager; } /** * This represents the interface for the SDK instance with asynchronous storage and client-side API, @@ -1049,20 +1042,20 @@ declare namespace SplitIO { * @function client * @returns {IAsyncClient} The asynchronous client instance. */ - client(): IAsyncClient, + client(): IAsyncClient; /** * Returns a shared client of the SDK, associated with the given key. * @function client * @param {SplitKey} key The key for the new client instance. * @returns {IAsyncClient} The asynchronous client instance. */ - client(key: SplitKey): IAsyncClient, + client(key: SplitKey): IAsyncClient; /** * Returns a manager instance of the SDK to explore available information. * @function manager * @returns {IManager} The manager instance. */ - manager(): IAsyncManager + manager(): IAsyncManager; } /** * This represents the interface for the Client instance with synchronous storage for server-side SDK, where we don't have only one key. @@ -1078,7 +1071,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {Treatment} The treatment string. */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment, + getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment; /** * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. * @function getTreatmentWithConfig @@ -1088,7 +1081,7 @@ declare namespace SplitIO { * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the * configuration stringified JSON (or null if there was no config for that treatment). */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, + getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the given features. 
* @function getTreatments @@ -1097,7 +1090,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {Treatments} The treatments object map. */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments, + getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. * @function getTreatmentsWithConfig @@ -1106,7 +1099,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects */ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, + getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. * @function getTreatmentsByFlagSet @@ -1115,7 +1108,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {Treatments} The map with all the Treatment objects */ - getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments, + getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. * @function getTreatmentsWithConfigByFlagSet @@ -1124,7 +1117,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects */ - getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig, + getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. * @function getTreatmentsByFlagSets @@ -1133,7 +1126,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {Treatments} The map with all the Treatment objects */ - getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments, + getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. * @function getTreatmentsWithConfigByFlagSets @@ -1142,7 +1135,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
* @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects */ - getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig, + getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface. * @function track @@ -1153,7 +1146,7 @@ declare namespace SplitIO { * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. * @returns {boolean} Whether the event was added to the queue successfully or not. */ - track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean, + track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; } /** * This represents the interface for the Client instance with asynchronous storage for server-side SDK, where we don't have only one key. @@ -1171,7 +1164,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment, + getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment; /** * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. * For usage on NodeJS as we don't have only one key. @@ -1182,7 +1175,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig, + getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig; /** * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. * For usage on NodeJS as we don't have only one key. @@ -1192,7 +1185,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments, + getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. * For usage on NodeJS as we don't have only one key. @@ -1202,7 +1195,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. 
*/ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig, + getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. * For usage on NodeJS as we don't have only one key. @@ -1212,7 +1205,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. */ - getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments, + getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. * For usage on NodeJS as we don't have only one key. @@ -1222,7 +1215,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. */ - getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentWithConfig, + getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentWithConfig; /** * Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. * For usage on NodeJS as we don't have only one key. @@ -1232,7 +1225,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. */ - getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments, + getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. * For usage on NodeJS as we don't have only one key. @@ -1242,7 +1235,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. */ - getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentWithConfig, + getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentWithConfig; /** * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). * @function track @@ -1253,7 +1246,7 @@ declare namespace SplitIO { * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. 
* @returns {Promise} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. */ - track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise + track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; } /** * This represents the interface for the Client instance with synchronous storage for client-side SDK, where each client has associated a key. @@ -1268,7 +1261,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {Treatment} The treatment string. */ - getTreatment(featureFlagName: string, attributes?: Attributes): Treatment, + getTreatment(featureFlagName: string, attributes?: Attributes): Treatment; /** * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. * @function getTreatmentWithConfig @@ -1276,7 +1269,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {TreatmentWithConfig} The map containing the treatment and the configuration stringified JSON (or null if there was no config for that treatment). */ - getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, + getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the given features. * @function getTreatments @@ -1284,7 +1277,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {Treatments} The treatments object map. */ - getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments, + getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. * @function getTreatmentsWithConfig @@ -1292,7 +1285,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects */ - getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, + getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. * @function getTreatmentsByFlagSet @@ -1300,7 +1293,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {Treatments} The map with all the Treatments objects */ - getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments, + getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. 
* @function getTreatmentsWithConfigByFlagSet @@ -1308,7 +1301,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects */ - getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig, + getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. * @function getTreatmentsByFlagSets @@ -1316,7 +1309,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {Treatments} The map with all the Treatments objects */ - getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments, + getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. * @function getTreatmentsWithConfigByFlagSets @@ -1324,7 +1317,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects */ - getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig, + getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface. * @function track @@ -1334,7 +1327,7 @@ declare namespace SplitIO { * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. * @returns {boolean} Whether the event was added to the queue successfully or not. */ - track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean, + track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; /** * Add an attribute to client's in memory attributes storage. * @@ -1342,40 +1335,40 @@ declare namespace SplitIO { * @param {AttributeType} attributeValue Attribute value * @returns {boolean} true if the attribute was stored and false otherwise */ - setAttribute(attributeName: string, attributeValue: AttributeType): boolean, + setAttribute(attributeName: string, attributeValue: AttributeType): boolean; /** * Returns the attribute with the given name. * * @param {string} attributeName Attribute name * @returns {AttributeType} Attribute with the given name */ - getAttribute(attributeName: string): AttributeType, + getAttribute(attributeName: string): AttributeType; /** * Removes from client's in memory attributes storage the attribute with the given name. * * @param {string} attributeName * @returns {boolean} true if attribute was removed and false otherwise */ - removeAttribute(attributeName: string): boolean, + removeAttribute(attributeName: string): boolean; /** * Add to client's in memory attributes storage the attributes in 'attributes'. 
* * @param {Attributes} attributes Object with attributes to store * @returns true if attributes were stored an false otherwise */ - setAttributes(attributes: Attributes): boolean, + setAttributes(attributes: Attributes): boolean; /** * Return all the attributes stored in client's in memory attributes storage. * * @returns {Attributes} returns all the stored attributes */ - getAttributes(): Attributes, + getAttributes(): Attributes; /** * Remove all the stored attributes in the client's in memory attribute storage. * * @returns {boolean} true if all attribute were removed and false otherwise */ - clearAttributes(): boolean + clearAttributes(): boolean; } /** * This represents the interface for the Client instance with asynchronous storage for client-side SDK, where each client has associated a key. @@ -1390,7 +1383,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. */ - getTreatment(featureFlagName: string, attributes?: Attributes): AsyncTreatment, + getTreatment(featureFlagName: string, attributes?: Attributes): AsyncTreatment; /** * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. * @function getTreatmentWithConfig @@ -1398,7 +1391,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. */ - getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig, + getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig; /** * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. * @function getTreatments @@ -1406,7 +1399,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. */ - getTreatments(featureFlagNames: string[], attributes?: Attributes): AsyncTreatments, + getTreatments(featureFlagNames: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. * @function getTreatmentsWithConfig @@ -1414,7 +1407,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. */ - getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig, + getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. * @function getTreatmentsByFlagSet @@ -1422,7 +1415,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
* @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. */ - getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatments, + getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. * @function getTreatmentsWithConfigByFlagSet @@ -1430,7 +1423,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. */ - getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig, + getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. * @function getTreatmentsByFlagSets @@ -1438,7 +1431,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. */ - getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatments, + getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. * @function getTreatmentsWithConfigByFlagSets @@ -1446,7 +1439,7 @@ declare namespace SplitIO { * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. */ - getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig, + getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). * @function track @@ -1456,7 +1449,7 @@ declare namespace SplitIO { * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. * @returns {boolean} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. */ - track(trafficType: string, eventType: string, value?: number, properties?: Properties): Promise, + track(trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; /** * Add an attribute to client's in memory attributes storage. * @@ -1464,40 +1457,40 @@ declare namespace SplitIO { * @param {AttributeType} attributeValue Attribute value * @returns {boolean} true if the attribute was stored and false otherwise */ - setAttribute(attributeName: string, attributeValue: AttributeType): boolean, + setAttribute(attributeName: string, attributeValue: AttributeType): boolean; /** * Returns the attribute with the given name. 
* * @param {string} attributeName Attribute name * @returns {AttributeType} Attribute with the given name */ - getAttribute(attributeName: string): AttributeType, + getAttribute(attributeName: string): AttributeType; /** * Removes from client's in memory attributes storage the attribute with the given name. * * @param {string} attributeName * @returns {boolean} true if attribute was removed and false otherwise */ - removeAttribute(attributeName: string): boolean, + removeAttribute(attributeName: string): boolean; /** * Add to client's in memory attributes storage the attributes in 'attributes'. * * @param {Attributes} attributes Object with attributes to store * @returns true if attributes were stored an false otherwise */ - setAttributes(attributes: Attributes): boolean, + setAttributes(attributes: Attributes): boolean; /** * Return all the attributes stored in client's in memory attributes storage. * * @returns {Attributes} returns all the stored attributes */ - getAttributes(): Attributes, + getAttributes(): Attributes; /** * Remove all the stored attributes in the client's in memory attribute storage. * * @returns {boolean} true if all attribute were removed and false otherwise */ - clearAttributes(): boolean + clearAttributes(): boolean; } /** * Representation of a manager instance with synchronous storage of the SDK. @@ -1510,20 +1503,20 @@ declare namespace SplitIO { * @function names * @returns {SplitNames} The list of feature flag names. */ - names(): SplitNames, + names(): SplitNames; /** * Get the array of feature flags data in SplitView format. * @function splits * @returns {SplitViews} The list of SplitIO.SplitView. */ - splits(): SplitViews, + splits(): SplitViews; /** * Get the data of a split in SplitView format. * @function split * @param {string} featureFlagName The name of the feature flag we want to get info of. * @returns {SplitView} The SplitIO.SplitView of the given split. */ - split(featureFlagName: string): SplitView, + split(featureFlagName: string): SplitView; } /** * Representation of a manager instance with asynchronous storage of the SDK. @@ -1536,19 +1529,19 @@ declare namespace SplitIO { * @function names * @returns {SplitNamesAsync} A promise that resolves to the list of feature flag names. */ - names(): SplitNamesAsync, + names(): SplitNamesAsync; /** * Get the array of feature flags data in SplitView format. * @function splits * @returns {SplitViewsAsync} A promise that resolves to the SplitIO.SplitView list. */ - splits(): SplitViewsAsync, + splits(): SplitViewsAsync; /** * Get the data of a split in SplitView format. * @function split * @param {string} featureFlagName The name of the feature flag we want to get info of. * @returns {SplitViewAsync} A promise that resolves to the SplitIO.SplitView value. 
*/ - split(featureFlagName: string): SplitViewAsync, + split(featureFlagName: string): SplitViewAsync; } } From 61eaece14eced31dffce174fe5cfdff85d21bda0 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 25 Oct 2024 16:20:19 -0300 Subject: [PATCH 124/146] Rename IClientSS to INodeClient and IAsyncClientSS to INodeAsyncClient --- types/splitio.d.ts | 57 ++++++++++++++++++++++++++++------------------ 1 file changed, 35 insertions(+), 22 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index fde0963e..5cf7dffe 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1058,13 +1058,16 @@ declare namespace SplitIO { manager(): IAsyncManager; } /** - * This represents the interface for the Client instance with synchronous storage for server-side SDK, where we don't have only one key. - * @interface IClient + * This represents the interface for the Client instance on server-side, where the user key is not bound to the instance and must be provided on each method call. + * This interface is available in NodeJS, or when importing the 'server' sub-package of JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). + * + * @interface INodeClient * @extends IBasicClient */ - interface IClientSS extends IBasicClient { + interface INodeClient extends IBasicClient { /** * Returns a Treatment value, which is the treatment string for the given feature. + * * @function getTreatment * @param {string} key - The string key representing the consumer. * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. @@ -1074,6 +1077,7 @@ declare namespace SplitIO { getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment; /** * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. + * * @function getTreatmentWithConfig * @param {string} key - The string key representing the consumer. * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. @@ -1084,6 +1088,7 @@ declare namespace SplitIO { getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the given features. + * * @function getTreatments * @param {string} key - The string key representing the consumer. * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. @@ -1093,6 +1098,7 @@ declare namespace SplitIO { getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * * @function getTreatmentsWithConfig * @param {string} key - The string key representing the consumer. * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. @@ -1102,6 +1108,7 @@ declare namespace SplitIO { getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * * @function getTreatmentsByFlagSet * @param {string} key - The string key representing the consumer. * @param {string} flagSet - The flag set name we want to get the treatments. 
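
For reference, a minimal usage sketch of the server-side (`INodeClient`) surface documented in the hunks above, where the user key is passed on every call. The factory configuration, key, flag names, attributes and event values are placeholders and not part of this patch; the import path assumes the published NodeJS SDK package entry point.

```
import { SplitFactory } from '@splitsoftware/splitio';

const factory = SplitFactory({ core: { authorizationKey: 'YOUR_SDK_KEY' } });
const client = factory.client();

client.on(client.Event.SDK_READY, () => {
  // Single flag evaluation for a given key, with optional attributes
  const treatment = client.getTreatment('user-123', 'my_feature_flag', { plan: 'premium' });

  // Treatments for every flag associated to a flag set, for the same key
  const treatments = client.getTreatmentsByFlagSet('user-123', 'checkout_flags');

  // Queue an event for the key; returns whether it was queued successfully
  const queued = client.track('user-123', 'user', 'page_load_time', 83.334);
});
```
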
@@ -1111,6 +1118,7 @@ declare namespace SplitIO { getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. + * * @function getTreatmentsWithConfigByFlagSet * @param {string} key - The string key representing the consumer. * @param {string} flagSet - The flag set name we want to get the treatments. @@ -1119,7 +1127,8 @@ declare namespace SplitIO { */ getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig; /** - * Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. + * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. + * * @function getTreatmentsByFlagSets * @param {string} key - The string key representing the consumer. * @param {Array} flagSets - An array of the flag set names we want to get the treatments. @@ -1129,6 +1138,7 @@ declare namespace SplitIO { getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. + * * @function getTreatmentsWithConfigByFlagSets * @param {string} key - The string key representing the consumer. * @param {Array} flagSets - An array of the flag set names we want to get the treatments. @@ -1138,9 +1148,10 @@ declare namespace SplitIO { getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface. + * * @function track * @param {SplitKey} key - The key that identifies the entity related to this event. - * @param {string} trafficType - The traffic type of the entity related to this event. + * @param {string} trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} * @param {string} eventType - The event type corresponding to this event. * @param {number=} value - The value of this event. * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. @@ -1149,15 +1160,17 @@ declare namespace SplitIO { track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; } /** - * This represents the interface for the Client instance with asynchronous storage for server-side SDK, where we don't have only one key. - * @interface IAsyncClientSS + * This represents the interface for the Client instance on server-side with asynchronous storage, like REDIS. + * User key is not bound to the instance and must be provided on each method call, which returns a promise. + * This interface is available in NodeJS, or when importing the 'server' sub-package in JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). 
+ * + * @interface INodeAsyncClient * @extends IBasicClient */ - interface IAsyncClientSS extends IBasicClient { + interface INodeAsyncClient extends IBasicClient { /** * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. + * * @function getTreatment * @param {string} key - The string key representing the consumer. * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. @@ -1167,8 +1180,7 @@ declare namespace SplitIO { getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment; /** * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. + * * @function getTreatmentWithConfig * @param {string} key - The string key representing the consumer. * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. @@ -1178,7 +1190,7 @@ declare namespace SplitIO { getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig; /** * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. - * For usage on NodeJS as we don't have only one key. + * * @function getTreatments * @param {string} key - The string key representing the consumer. * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. @@ -1188,7 +1200,7 @@ declare namespace SplitIO { getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * For usage on NodeJS as we don't have only one key. + * * @function getTreatmentsWithConfig * @param {string} key - The string key representing the consumer. * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. @@ -1198,7 +1210,7 @@ declare namespace SplitIO { getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. - * For usage on NodeJS as we don't have only one key. + * * @function getTreatmentsByFlagSet * @param {string} key - The string key representing the consumer. * @param {string} flagSet - The flag set name we want to get the treatments. @@ -1208,17 +1220,17 @@ declare namespace SplitIO { getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. - * For usage on NodeJS as we don't have only one key. + * * @function getTreatmentsWithConfigByFlagSet * @param {string} key - The string key representing the consumer. * @param {string} flagSet - The flag set name we want to get the treatments. 
* @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. */ - getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentWithConfig; + getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig; /** - * Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. - * For usage on NodeJS as we don't have only one key. + * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. + * * @function getTreatmentsByFlagSets * @param {string} key - The string key representing the consumer. * @param {Array} flagSets - An array of the flag set names we want to get the treatments. @@ -1228,19 +1240,20 @@ declare namespace SplitIO { getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. - * For usage on NodeJS as we don't have only one key. + * * @function getTreatmentsWithConfigByFlagSets * @param {string} key - The string key representing the consumer. * @param {Array} flagSets - An array of the flag set names we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. */ - getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentWithConfig; + getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). + * * @function track * @param {SplitKey} key - The key that identifies the entity related to this event. - * @param {string} trafficType - The traffic type of the entity related to this event. + * @param {string} trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} * @param {string} eventType - The event type corresponding to this event. * @param {number=} value - The value of this event. * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. 
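
The promise-based `INodeAsyncClient` interface above is the counterpart used when the SDK runs in consumer mode against an asynchronous storage such as Redis. A hedged sketch of how it might be consumed, assuming the `SplitIO` namespace is available as declared in this file; key, flag and event names are illustrative, and the factory setup needed to obtain such a client is omitted:

```
async function evaluate(client: SplitIO.INodeAsyncClient, userKey: string) {
  // Every evaluation resolves asynchronously because the data lives in the async storage
  const treatment = await client.getTreatment(userKey, 'my_feature_flag');
  const withConfig = await client.getTreatmentWithConfig(userKey, 'my_feature_flag');

  // track also returns a promise signaling whether the event was queued
  const queued = await client.track(userKey, 'user', 'checkout_completed', 49.99);

  return { treatment, withConfig, queued };
}
```
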
From 7c3e6fa08ba96328c4138387fbd145f99e5341a7 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 25 Oct 2024 17:56:47 -0300 Subject: [PATCH 125/146] 3rc step: move SplitIO namespace definition from JS SDK to JS-Commons and merge --- package-lock.json | 1 + package.json | 1 + types/splitio.d.ts | 1119 ++++++++++++++++++++++++++++++++++++-------- 3 files changed, 928 insertions(+), 193 deletions(-) diff --git a/package-lock.json b/package-lock.json index 08f93a03..73ffb070 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,6 +9,7 @@ "version": "2.0.0-rc.3", "license": "Apache-2.0", "dependencies": { + "@types/ioredis": "^4.28.0", "tslib": "^2.3.1" }, "devDependencies": { diff --git a/package.json b/package.json index 2cf80e9f..9ab651a3 100644 --- a/package.json +++ b/package.json @@ -45,6 +45,7 @@ "bugs": "https://github.com/splitio/javascript-commons/issues", "homepage": "https://github.com/splitio/javascript-commons#readme", "dependencies": { + "@types/ioredis": "^4.28.0", "tslib": "^2.3.1" }, "peerDependencies": { diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 5cf7dffe..f1a4b3fb 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1,6 +1,9 @@ // Type definitions for Split Software SDKs // Project: http://www.split.io/ +import { RedisOptions } from 'ioredis'; +import { RequestOptions } from 'http'; + export as namespace SplitIO; export = SplitIO; @@ -16,6 +19,28 @@ interface IEventEmitter { removeAllListeners(event?: string): this; emit(event: string, ...args: any[]): boolean; } +/** + * NodeJS.EventEmitter interface + * @see {@link https://nodejs.org/api/events.html} + */ +interface EventEmitter extends IEventEmitter { + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + off(event: string | symbol, listener: (...args: any[]) => void): this; + removeAllListeners(event?: string | symbol): this; + emit(event: string | symbol, ...args: any[]): boolean; + setMaxListeners(n: number): this; + getMaxListeners(): number; + listeners(event: string | symbol): Function[]; + rawListeners(event: string | symbol): Function[]; + listenerCount(type: string | symbol): number; + // Added in Node 6... + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + eventNames(): Array; +} /** * @typedef {Object} EventConsts * @property {string} SDK_READY The ready event. @@ -38,7 +63,7 @@ type SDKMode = 'standalone' | 'localhost' | 'consumer' | 'consumer_partial'; * Storage types. * @typedef {string} StorageType */ -type StorageType = 'MEMORY' | 'LOCALSTORAGE'; +type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; /** * Settings interface. This is a representation of the settings the SDK expose, that's why * most of it's props are readonly. Only features should be rewritten when localhost mode is active. 
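
Since this patch extends `StorageType` with `'REDIS'` and `'PLUGGABLE'` and lets `ISettings.storage` carry a `SplitIO.StorageOptions` value, a hedged sketch of what a NodeJS consumer-mode configuration might look like follows. The shape matches the `StorageOptions` type merged in this patch (`type`, optional `prefix` and `options`); the concrete keys under `options` (e.g. `url`) and the prefix value are assumptions for illustration, not defined here.

```
// Illustrative only: consumer mode backed by the 'REDIS' storage type.
const config = {
  core: { authorizationKey: 'YOUR_SDK_KEY' },
  mode: 'consumer',
  storage: {
    type: 'REDIS',
    prefix: 'SPLITIO',
    options: { url: 'redis://localhost:6379' } // assumed option key
  }
};
```
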
@@ -56,6 +81,10 @@ interface ISettings { featuresRefreshRate: number; impressionsRefreshRate: number; impressionsQueueSize: number; + /** + * @deprecated + */ + metricsRefreshRate?: number; telemetryRefreshRate: number; segmentsRefreshRate: number; offlineRefreshRate: number; @@ -69,7 +98,7 @@ interface ISettings { retriesOnFailureBeforeReady: number; eventsFirstPushWindow: number; }; - readonly storage?: SplitIO.StorageSyncFactory | SplitIO.StorageAsyncFactory; + readonly storage: SplitIO.StorageSyncFactory | SplitIO.StorageAsyncFactory | SplitIO.StorageOptions; readonly urls: { events: string; sdk: string; @@ -81,9 +110,9 @@ interface ISettings { readonly debug: boolean | LogLevel | SplitIO.ILogger; readonly version: string; /** - * Mocked features map. + * Mocked features map if using in client-side, or mocked features file path string if using in server-side (NodeJS). */ - features?: SplitIO.MockedFeaturesMap; + features: SplitIO.MockedFeaturesMap | SplitIO.MockedFeaturesFilePath; readonly streamingEnabled: boolean; readonly sync: { splitFilters: SplitIO.SplitFilter[]; @@ -94,7 +123,10 @@ interface ISettings { getHeaderOverrides?: (context: { headers: Record }) => Record; }; }; - readonly userConsent: SplitIO.ConsentStatus; + /** + * User consent status if using in client-side. Undefined if using in server-side (NodeJS). + */ + readonly userConsent?: SplitIO.ConsentStatus } /** * Log levels. @@ -165,9 +197,9 @@ interface IUserConsentAPI { /** * Common API for entities that expose status handlers. * @interface IStatusInterface - * @extends IEventEmitter + * @extends EventEmitter */ -interface IStatusInterface extends IEventEmitter { +interface IStatusInterface extends EventEmitter { /** * Constant object containing the SDK events for you to use. * @property {EventConsts} Event @@ -226,11 +258,6 @@ interface IBasicSDK { * @property Logger */ Logger: ILoggerAPI; - /** - * User consent API. - * @property UserConsent - */ - UserConsent: IUserConsentAPI; /** * Destroys all the clients created by this factory. * @function destroy @@ -265,7 +292,7 @@ declare namespace SplitIO { [featureName: string]: Treatment; }; /** - * Feature flags treatments promise that resolves to the actual SplitIO.Treatments object. + * Feature flag treatments promise that resolves to the actual SplitIO.Treatments object. * @typedef {Promise} AsyncTreatments */ type AsyncTreatments = Promise; @@ -297,7 +324,7 @@ declare namespace SplitIO { [featureName: string]: TreatmentWithConfig; }; /** - * Feature flags treatments promise that resolves to the actual SplitIO.TreatmentsWithConfig object. + * Feature flag treatments promise that resolves to the actual SplitIO.TreatmentsWithConfig object. * @typedef {Promise} AsyncTreatmentsWithConfig */ type AsyncTreatmentsWithConfig = Promise; @@ -307,9 +334,9 @@ declare namespace SplitIO { */ type Event = 'init::timeout' | 'init::ready' | 'init::cache-ready' | 'state::update'; /** - * Attributes should be on object with values of type string or number (dates should be sent as millis since epoch). + * Attributes should be on object with values of type string, boolean, number (dates should be sent as millis since epoch) or array of strings or numbers. 
* @typedef {Object.} Attributes - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#attribute-syntax} + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#attribute-syntax} */ type Attributes = { [attributeName: string]: AttributeType; @@ -321,17 +348,25 @@ declare namespace SplitIO { type AttributeType = string | number | boolean | Array; /** * Properties should be an object with values of type string, number, boolean or null. Size limit of ~31kb. - * @typedef {Object.} Attributes - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#track + * @typedef {Object.} Properties + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#track */ type Properties = { [propertyName: string]: string | number | boolean | null; }; /** - * The customer identifier represented by a string. - * @typedef {string} SplitKey + * The SplitKey object format. + * @typedef {Object.} SplitKeyObject + */ + type SplitKeyObject = { + matchingKey: string; + bucketingKey: string; + }; + /** + * The customer identifier. Could be a SplitKeyObject or a string. + * @typedef {SplitKeyObject|string} SplitKey */ - type SplitKey = string; + type SplitKey = SplitKeyObject | string; /** * Path to file with mocked features (for node). * @typedef {string} MockedFeaturesFilePath @@ -492,11 +527,34 @@ declare namespace SplitIO { */ wrapper: Object; } + /** + * Synchronous storage valid types for NodeJS. + * @typedef {string} NodeSyncStorage + */ + type NodeSyncStorage = 'MEMORY'; + /** + * Asynchronous storages valid types for NodeJS. + * @typedef {string} NodeAsyncStorage + */ + type NodeAsyncStorage = 'REDIS'; + /** + * Storage valid types for the browser. + * @typedef {string} BrowserStorage + */ + type BrowserStorage = 'MEMORY' | 'LOCALSTORAGE'; + /** + * Storage options for the SDK with no pluggable storage. + */ + type StorageOptions = { + type: NodeSyncStorage | NodeAsyncStorage | BrowserStorage; + prefix?: string; + options?: Object; + } /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. * @interface IImpressionListener - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#listener} + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#listener} */ interface IImpressionListener { logImpression(data: SplitIO.ImpressionData): void; @@ -1004,109 +1062,804 @@ declare namespace SplitIO { } } /** - * This represents the interface for the SDK instance with synchronous storage and client-side API, - * i.e., where client instances have a bound user key. - * @interface ISDK - * @extends IBasicSDK + * Common settings between Browser and NodeJS settings interface. + * @interface ISharedSettings */ - interface ISDK extends IBasicSDK { + interface ISharedSettings { /** - * Returns the default client instance of the SDK, associated with the key provided on settings. - * @function client - * @returns {IClient} The client instance. + * Boolean value to indicate whether the logger should be enabled or disabled, or a log level string. + * + * Examples: + * ``` + * config.debug = true + * config.debug = 'WARN' + * ``` + * @property {boolean | LogLevel} debug + * @default false */ - client(): IClient; + debug?: boolean | LogLevel, /** - * Returns a shared client of the SDK, associated with the given key. 
- * @function client - * @param {SplitKey} key The key for the new client instance. - * @returns {IClient} The client instance. + * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, + * which will check for the logImpression method. + * @property {IImpressionListener} impressionListener + * @default undefined */ - client(key: SplitKey): IClient; + impressionListener?: SplitIO.IImpressionListener, /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. + * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, + * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. + * @property {boolean} streamingEnabled + * @default true */ - manager(): IManager; + streamingEnabled?: boolean, + /** + * SDK synchronization settings. + * @property {Object} sync + */ + sync?: { + /** + * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. + * This configuration is only meaningful when the SDK is working in "standalone" mode. + * + * Example: + * `splitFilter: [ + * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' + * ]` + * @property {SplitIO.SplitFilter[]} splitFilters + */ + splitFilters?: SplitIO.SplitFilter[] + /** + * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. + * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. + * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). + * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. + * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. + * + * @property {string} impressionsMode + * @default 'OPTIMIZED' + */ + impressionsMode?: SplitIO.ImpressionsMode, + /** + * Controls the SDK continuous synchronization flags. + * + * When `true` a running SDK will process rollout plan updates performed on the UI (default). + * When false it'll just fetch all data upon init. + * + * @property {boolean} enabled + * @default true + */ + enabled?: boolean + } } /** - * This represents the interface for the SDK instance with asynchronous storage and client-side API, - * i.e., where client instances have a bound user key. - * @interface IAsyncSDK - * @extends IBasicSDK + * Common settings interface for SDK instances on NodeJS. + * @interface INodeBasicSettings + * @extends ISharedSettings */ - interface IAsyncSDK extends IBasicSDK { + interface INodeBasicSettings extends ISharedSettings { /** - * Returns the default client instance of the SDK, associated with the key provided on settings. - * @function client - * @returns {IAsyncClient} The asynchronous client instance. + * SDK Startup settings for NodeJS. + * @property {Object} startup */ - client(): IAsyncClient; + startup?: { + /** + * Maximum amount of time used before notify a timeout. 
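A short sketch of the shared options documented above (debug level, streaming toggle, flag filters and impressions mode); the flag names are placeholders.

```
const sharedOptions: SplitIO.ISharedSettings = {
  debug: 'WARN',                  // boolean or a LogLevel string
  streamingEnabled: true,         // falls back to polling automatically if streaming fails
  sync: {
    splitFilters: [
      { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }
    ],
    impressionsMode: 'OPTIMIZED'  // 'DEBUG' | 'OPTIMIZED' | 'NONE'
  }
};
```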
+ * @property {number} readyTimeout + * @default 15 + */ + readyTimeout?: number, + /** + * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. + * @property {number} requestTimeoutBeforeReady + * @default 15 + */ + requestTimeoutBeforeReady?: number, + /** + * How many quick retries we will do while starting up the SDK. + * @property {number} retriesOnFailureBeforeReady + * @default 1 + */ + retriesOnFailureBeforeReady?: number, + /** + * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, + * to better control on browsers. This number defines that window before the first events push. + * + * @property {number} eventsFirstPushWindow + * @default 0 + */ + eventsFirstPushWindow?: number, + }, /** - * Returns a shared client of the SDK, associated with the given key. - * @function client - * @param {SplitKey} key The key for the new client instance. - * @returns {IAsyncClient} The asynchronous client instance. + * SDK scheduler settings. + * @property {Object} scheduler */ - client(key: SplitKey): IAsyncClient; + scheduler?: { + /** + * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. + * @property {number} featuresRefreshRate + * @default 60 + */ + featuresRefreshRate?: number, + /** + * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. + * @property {number} impressionsRefreshRate + * @default 300 + */ + impressionsRefreshRate?: number, + /** + * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * @property {number} impressionsQueueSize + * @default 30000 + */ + impressionsQueueSize?: number, + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * @property {number} metricsRefreshRate + * @default 120 + * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. + */ + metricsRefreshRate?: number, + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * @property {number} telemetryRefreshRate + * @default 3600 + */ + telemetryRefreshRate?: number, + /** + * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. + * @property {number} segmentsRefreshRate + * @default 60 + */ + segmentsRefreshRate?: number, + /** + * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. + * @property {number} eventsPushRate + * @default 60 + */ + eventsPushRate?: number, + /** + * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * @property {number} eventsQueueSize + * @default 500 + */ + eventsQueueSize?: number, + /** + * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. 
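The scheduler block above is easiest to read as a config object. This sketch simply restates the documented defaults (values in seconds) under a placeholder SDK key; tune them only if you have a reason to.

```
const nodeConfig: SplitIO.INodeSettings = {
  core: { authorizationKey: '<YOUR_SDK_KEY>' },
  startup: {
    readyTimeout: 15,
    requestTimeoutBeforeReady: 15,
    retriesOnFailureBeforeReady: 1
  },
  scheduler: {
    featuresRefreshRate: 60,
    segmentsRefreshRate: 60,
    impressionsRefreshRate: 300,
    eventsPushRate: 60,
    telemetryRefreshRate: 3600   // metricsRefreshRate is deprecated and ignored
  }
};
```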
+ * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * @property {number} offlineRefreshRate + * @default 15 + */ + offlineRefreshRate?: number + /** + * When using streaming mode, seconds to wait before re attempting to connect for push notifications. + * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... + * @property {number} pushRetryBackoffBase + * @default 1 + */ + pushRetryBackoffBase?: number, + }, /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. + * SDK Core settings for NodeJS. + * @property {Object} core */ - manager(): IAsyncManager; - } - /** - * This represents the interface for the Client instance on server-side, where the user key is not bound to the instance and must be provided on each method call. - * This interface is available in NodeJS, or when importing the 'server' sub-package of JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). - * - * @interface INodeClient - * @extends IBasicClient - */ - interface INodeClient extends IBasicClient { + core: { + /** + * Your SDK key. + * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} + * @property {string} authorizationKey + */ + authorizationKey: string, + /** + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * @property {boolean} labelsEnabled + * @default true + */ + labelsEnabled?: boolean + /** + * Disable machine IP and Name from being sent to Split backend. + * @property {boolean} IPAddressesEnabled + * @default true + */ + IPAddressesEnabled?: boolean + }, /** - * Returns a Treatment value, which is the treatment string for the given feature. - * - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. + * Defines which kind of storage we should instantiate. + * @property {Object} storage */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment; + storage?: { + /** + * Storage type to be instantiated by the SDK. + * @property {StorageType} type + * @default 'MEMORY' + */ + type?: StorageType, + /** + * Options to be passed to the selected storage. + * @property {Object} options + */ + options?: Object, + /** + * Optional prefix to prevent any kind of data collision between SDK versions. + * @property {string} prefix + * @default 'SPLITIO' + */ + prefix?: string + }, /** - * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. - * - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the - * configuration stringified JSON (or null if there was no config for that treatment). + * The SDK mode. 
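A minimal sketch of the NodeJS core block documented above, showing the two privacy toggles next to the SDK key (placeholder value).

```
const core: SplitIO.INodeSettings['core'] = {
  authorizationKey: '<YOUR_SDK_KEY>',
  labelsEnabled: false,       // keep rule labels out of the impressions sent to Split
  IPAddressesEnabled: false   // do not report the machine IP address and hostname
};
```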
Possible values are "standalone", which is the default when using a synchronous storage, like 'MEMORY' and 'LOCALSTORAGE', + * and "consumer", which must be set when using an asynchronous storage, like 'REDIS'. For "localhost" mode, use "localhost" as authorizationKey. + * @property {SDKMode} mode + * @default 'standalone' */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; + mode?: SDKMode, /** - * Returns a Treatments value, which is an object map with the treatments for the given features. - * - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. + * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * @property {MockedFeaturesFilePath} features + * @default '$HOME/.split' */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments; + features?: SplitIO.MockedFeaturesFilePath, + } + /** + * Settings interface for SDK instances created on the browser + * @interface IBrowserSettings + * @extends ISharedSettings + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} + */ + interface IBrowserSettings extends ISharedSettings { /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + * SDK Startup settings for the Browser. + * @property {Object} startup */ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; - /** + startup?: { + /** + * Maximum amount of time used before notify a timeout. + * @property {number} readyTimeout + * @default 1.5 + */ + readyTimeout?: number, + /** + * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. + * @property {number} requestTimeoutBeforeReady + * @default 1.5 + */ + requestTimeoutBeforeReady?: number, + /** + * How many quick retries we will do while starting up the SDK. + * @property {number} retriesOnFailureBeforeReady + * @default 1 + */ + retriesOnFailureBeforeReady?: number, + /** + * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, + * to better control on browsers. This number defines that window before the first events push. + * + * @property {number} eventsFirstPushWindow + * @default 10 + */ + eventsFirstPushWindow?: number, + }, + /** + * SDK scheduler settings. 
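Putting the localhost-mode pieces together for NodeJS: 'localhost' as the authorization key plus a mocked features file path. The path is illustrative; when omitted it defaults to '$HOME/.split'.

```
const offlineNodeConfig: SplitIO.INodeSettings = {
  core: { authorizationKey: 'localhost' },
  features: '/home/me/.split',            // MockedFeaturesFilePath: a plain file path string
  scheduler: { offlineRefreshRate: 15 }   // how often the mocked file is re-read, in seconds
};
```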
+ * @property {Object} scheduler + */ + scheduler?: { + /** + * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. + * @property {number} featuresRefreshRate + * @default 60 + */ + featuresRefreshRate?: number, + /** + * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. + * @property {number} impressionsRefreshRate + * @default 60 + */ + impressionsRefreshRate?: number, + /** + * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * @property {number} impressionsQueueSize + * @default 30000 + */ + impressionsQueueSize?: number, + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * @property {number} metricsRefreshRate + * @default 120 + * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. + */ + metricsRefreshRate?: number, + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * @property {number} telemetryRefreshRate + * @default 3600 + */ + telemetryRefreshRate?: number, + /** + * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. + * @property {number} segmentsRefreshRate + * @default 60 + */ + segmentsRefreshRate?: number, + /** + * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. + * @property {number} eventsPushRate + * @default 60 + */ + eventsPushRate?: number, + /** + * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * @property {number} eventsQueueSize + * @default 500 + */ + eventsQueueSize?: number, + /** + * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. + * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + * @property {number} offlineRefreshRate + * @default 15 + */ + offlineRefreshRate?: number, + /** + * When using streaming mode, seconds to wait before re attempting to connect for push notifications. + * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... + * @property {number} pushRetryBackoffBase + * @default 1 + */ + pushRetryBackoffBase?: number, + }, + /** + * SDK Core settings for the browser. + * @property {Object} core + */ + core: { + /** + * Your SDK key. + * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} + * @property {string} authorizationKey + */ + authorizationKey: string, + /** + * Customer identifier. Whatever this means to you. + * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @property {SplitKey} key + */ + key: SplitKey, + /** + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * @property {boolean} labelsEnabled + * @default true + */ + labelsEnabled?: boolean + }, + /** + * Mocked features map. For testing purposes only. 
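For the browser, the same localhost idea uses a mocked features map instead of a file path, and the bound key can take the SplitKeyObject form. This is a sketch; the map's value shape (a treatment string per flag name) is assumed from the JS SDK's MockedFeaturesMap and is not spelled out in this hunk.

```
const offlineBrowserConfig: SplitIO.IBrowserSettings = {
  core: {
    authorizationKey: 'localhost',
    key: { matchingKey: 'user-123', bucketingKey: 'session-abc' }
  },
  features: {
    new_checkout: 'on',    // assumed shape: flag name -> treatment
    promo_banner: 'off'
  }
};
```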
For using this you should specify "localhost" as authorizationKey on core settings. + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + */ + features?: MockedFeaturesMap, + /** + * Defines which kind of storage we can instantiate on the browser. + * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. + * @property {Object} storage + */ + storage?: { + /** + * Storage type to be instantiated by the SDK. + * @property {BrowserStorage} type + * @default 'MEMORY' + */ + type?: BrowserStorage, + /** + * Optional prefix to prevent any kind of data collision between SDK versions. + * @property {string} prefix + * @default 'SPLITIO' + */ + prefix?: string + }, + /** + * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. + * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. + * @property {Object} urls + */ + urls?: UrlSettings, + /** + * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. + * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. + * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. + * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends + * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. + * + * @typedef {string} userConsent + * @default 'GRANTED' + */ + userConsent?: ConsentStatus, + sync?: ISharedSettings['sync'] & { + /** + * Custom options object for HTTP(S) requests in the Browser. + * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, + * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. + * + * @property getHeaderOverrides + * @default undefined + * + * @param context - The context for the request. + * @param context.headers - The current headers in the request. + * @returns A set of headers to be merged with the current headers. + * + * @example + * const getHeaderOverrides = (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * }; + */ + getHeaderOverrides?: (context: { headers: Record }) => Record + }, + } + } + /** + * Settings interface for SDK instances created on NodeJS. 
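A browser config sketch combining LOCALSTORAGE caching with deferred user consent, as documented above. The factory calls at the end are commented out and assume a factory created from this config; setStatus is the factory method named above, and its boolean argument is assumed to map to 'GRANTED'/'DECLINED'.

```
const browserConfig: SplitIO.IBrowserSettings = {
  core: { authorizationKey: '<YOUR_CLIENT_SIDE_SDK_KEY>', key: 'user-123' },
  storage: { type: 'LOCALSTORAGE', prefix: 'MYAPP' },
  userConsent: 'UNKNOWN'   // track events/impressions locally until consent is resolved
};

// Once the user answers the consent prompt:
// factory.UserConsent.setStatus(true);   // 'GRANTED': queued data is sent to Split
// factory.UserConsent.setStatus(false);  // 'DECLINED': queued data is not sent
```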
+ * If your storage is asynchronous (Redis for example) use SplitIO.INodeAsyncSettings instead. + * @interface INodeSettings + * @extends INodeBasicSettings + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} + */ + interface INodeSettings extends INodeBasicSettings { + /** + * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. + * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. + * @property {Object} urls + */ + urls?: UrlSettings, + /** + * Defines which kind of storage we can instantiate on NodeJS for 'standalone' mode. + * The only possible storage type is 'MEMORY', which is the default. + * @property {Object} storage + */ + storage?: { + /** + * Synchronous storage type to be instantiated by the SDK. + * @property {NodeSyncStorage} type + * @default 'MEMORY' + */ + type?: NodeSyncStorage, + /** + * Optional prefix to prevent any kind of data collision between SDK versions. + * @property {string} prefix + * @default 'SPLITIO' + */ + prefix?: string + }, + /** + * The SDK mode. When using the default 'MEMORY' storage, the only possible value is "standalone", which is the default. + * For "localhost" mode, use "localhost" as authorizationKey. + * + * @property {'standalone'} mode + * @default 'standalone' + */ + mode?: 'standalone' + sync?: INodeBasicSettings['sync'] & { + /** + * Custom options object for HTTP(S) requests in NodeJS. + * If provided, this object is merged with the options object passed by the SDK for EventSource and Node-Fetch calls. + * @see {@link https://www.npmjs.com/package/node-fetch#options} + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * @property getHeaderOverrides + * @default undefined + * + * @param context - The context for the request. + * @param context.headers - The current headers in the request. + * @returns A set of headers to be merged with the current headers. + * + * @example + * const getHeaderOverrides = (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * }; + */ + getHeaderOverrides?: (context: { headers: Record }) => Record + /** + * Custom NodeJS HTTP(S) Agent used by the SDK for HTTP(S) requests. + * + * You can use it, for example, for certificate pinning or setting a network proxy: + * + * ``` + * const { HttpsProxyAgent } = require('https-proxy-agent'); + * + * const proxyAgent = new HttpsProxyAgent(process.env.HTTPS_PROXY || 'http://10.10.1.10:1080'); + * + * const factory = SplitFactory({ + * ... + * sync: { + * requestOptions: { + * agent: proxyAgent + * } + * } + * }) + * ``` + * + * @see {@link https://nodejs.org/api/https.html#class-httpsagent} + * + * @property {http.Agent | https.Agent} agent + * @default undefined + */ + agent?: RequestOptions["agent"] + }, + } + } + /** + * Settings interface with async storage for SDK instances created on NodeJS. 
+ * If your storage is synchronous (by defaut we use memory, which is sync) use SplitIO.INodeSettings instead. + * @interface INodeAsyncSettings + * @extends INodeBasicSettings + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} + */ + interface INodeAsyncSettings extends INodeBasicSettings { + /** + * Defines which kind of async storage we can instantiate on NodeJS for 'consumer' mode. + * The only possible storage type is 'REDIS'. + * @property {Object} storage + */ + storage: { + /** + * 'REDIS' storage type to be instantiated by the SDK. + * @property {NodeAsyncStorage} type + */ + type: NodeAsyncStorage, + /** + * Options to be passed to the Redis storage. Use it with storage type: 'REDIS'. + * @property {Object} options + */ + options?: { + /** + * Redis URL. If set, `host`, `port`, `db` and `pass` params will be ignored. + * + * Examples: + * ``` + * url: 'localhost' + * url: '127.0.0.1:6379' + * url: 'redis://:authpassword@127.0.0.1:6379/0' + * ``` + * @property {string=} url + */ + url?: string, + /** + * Redis host. + * @property {string=} host + * @default 'localhost' + */ + host?: string, + /** + * Redis port. + * @property {number=} port + * @default 6379 + */ + port?: number, + /** + * Redis database to be used. + * @property {number=} db + * @default 0 + */ + db?: number, + /** + * Redis password. Don't define if no password is used. + * @property {string=} pass + * @default undefined + */ + pass?: string, + /** + * The milliseconds before a timeout occurs during the initial connection to the Redis server. + * @property {number=} connectionTimeout + * @default 10000 + */ + connectionTimeout?: number, + /** + * The milliseconds before Redis commands are timeout by the SDK. + * Method calls that involve Redis commands, like `client.getTreatment` or `client.track` calls, are resolved when the commands success or timeout. + * @property {number=} operationTimeout + * @default 5000 + */ + operationTimeout?: number, + /** + * TLS configuration for Redis connection. + * @see {@link https://www.npmjs.com/package/ioredis#tls-options } + * + * @property {Object=} tls + * @default undefined + */ + tls?: RedisOptions['tls'], + }, + /** + * Optional prefix to prevent any kind of data collision between SDK versions. + * @property {string} prefix + * @default 'SPLITIO' + */ + prefix?: string + }, + /** + * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. + * + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#state-sharing-redis-integration} + * + * @property {'consumer'} mode + */ + mode: 'consumer' + } + /** + * This represents the interface for the SDK instance with synchronous storage and client-side API, + * i.e., where client instances have a bound user key. + * @interface ISDK + * @extends IBasicSDK + */ + interface ISDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK, associated with the key provided on settings. + * @function client + * @returns {IClient} The client instance. + */ + client(): IClient; + /** + * Returns a shared client of the SDK, associated with the given key. + * @function client + * @param {SplitKey} key The key for the new client instance. + * @returns {IClient} The client instance. + */ + client(key: SplitKey): IClient; + /** + * Returns a manager instance of the SDK to explore available information. + * @function manager + * @returns {IManager} The manager instance. 
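The Redis options above map to a consumer-mode NodeJS config like the following sketch (the URL, timeouts and prefix are placeholders).

```
const redisConfig: SplitIO.INodeAsyncSettings = {
  core: { authorizationKey: '<YOUR_SDK_KEY>' },
  mode: 'consumer',               // required when the storage type is 'REDIS'
  storage: {
    type: 'REDIS',
    prefix: 'SPLITIO',
    options: {
      url: 'redis://:authpassword@127.0.0.1:6379/0',
      connectionTimeout: 10000,   // ms for the initial connection
      operationTimeout: 5000      // ms before SDK-issued Redis commands time out
    }
  }
};
```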
+ */ + manager(): IManager; + /** + * User consent API. + * @property UserConsent + */ + UserConsent: IUserConsentAPI; + } + /** + * This represents the interface for the SDK instance with asynchronous storage and client-side API, + * i.e., where client instances have a bound user key. + * @interface IAsyncSDK + * @extends IBasicSDK + */ + interface IAsyncSDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK, associated with the key provided on settings. + * @function client + * @returns {IAsyncClient} The asynchronous client instance. + */ + client(): IAsyncClient; + /** + * Returns a shared client of the SDK, associated with the given key. + * @function client + * @param {SplitKey} key The key for the new client instance. + * @returns {IAsyncClient} The asynchronous client instance. + */ + client(key: SplitKey): IAsyncClient; + /** + * Returns a manager instance of the SDK to explore available information. + * @function manager + * @returns {IManager} The manager instance. + */ + manager(): IAsyncManager; + /** + * User consent API. + * @property UserConsent + */ + UserConsent: IUserConsentAPI; + } + /** + * This represents the interface for the SDK instance for server-side with synchronous storage. + * @interface INodeSDK + * @extends IBasicSDK + */ + interface INodeSDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK. + * @function client + * @returns {INodeClient} The client instance. + */ + client(): INodeClient; + /** + * Returns a manager instance of the SDK to explore available information. + * @function manager + * @returns {IManager} The manager instance. + */ + manager(): IManager; + } + /** + * This represents the interface for the SDK instance for server-side with asynchronous storage. + * @interface INodeAsyncSDK + * @extends IBasicSDK + */ + interface INodeAsyncSDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK. + * @function client + * @returns {INodeAsyncClient} The asynchronous client instance. + */ + client(): INodeAsyncClient; + /** + * Returns a manager instance of the SDK to explore available information. + * @function manager + * @returns {IManager} The manager instance. + */ + manager(): IAsyncManager; + } + /** + * This represents the interface for the Client instance on server-side, where the user key is not bound to the instance and must be provided on each method call. + * This interface is available in NodeJS, or when importing the 'server' sub-package of JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). + * + * @interface INodeClient + * @extends IBasicClient + */ + interface INodeClient extends IBasicClient { + /** + * Returns a Treatment value, which is the treatment string for the given feature. + * + * @function getTreatment + * @param {string} key - The string key representing the consumer. + * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {Treatment} The treatment string. + */ + getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment; + /** + * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. + * + * @function getTreatmentWithConfig + * @param {string} key - The string key representing the consumer. 
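A server-side usage sketch for the INodeClient surface above: the key travels with every call instead of being bound to the client. The import path follows the 'server' sub-package mentioned in the interface description; the SDK_READY constant is the JS SDK's readiness event and is assumed here rather than defined in this hunk.

```
import { SplitFactory } from '@splitsoftware/splitio/server';

const factory = SplitFactory({ core: { authorizationKey: '<YOUR_SDK_KEY>' } });
const client = factory.client();

client.on(client.Event.SDK_READY, () => {
  const treatment = client.getTreatment('user-123', 'new_checkout', { plan: 'premium' });
  const withConfig = client.getTreatmentWithConfig('user-123', 'new_checkout');
  console.log(treatment, withConfig.config);
});
```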
+ * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the + * configuration stringified JSON (or null if there was no config for that treatment). + */ + getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; + /** + * Returns a Treatments value, which is an object map with the treatments for the given features. + * + * @function getTreatments + * @param {string} key - The string key representing the consumer. + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {Treatments} The treatments object map. + */ + getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments; + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * + * @function getTreatmentsWithConfig + * @param {string} key - The string key representing the consumer. + * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. + * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; + /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. * * @function getTreatmentsByFlagSet @@ -1261,14 +2014,59 @@ declare namespace SplitIO { */ track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; } + interface IClientWithAttributes extends IBasicClient { + /** + * Add an attribute to client's in memory attributes storage. + * + * @param {string} attributeName Attribute name + * @param {AttributeType} attributeValue Attribute value + * @returns {boolean} true if the attribute was stored and false otherwise + */ + setAttribute(attributeName: string, attributeValue: AttributeType): boolean; + /** + * Returns the attribute with the given name. + * + * @param {string} attributeName Attribute name + * @returns {AttributeType} Attribute with the given name + */ + getAttribute(attributeName: string): AttributeType; + /** + * Removes from client's in memory attributes storage the attribute with the given name. + * + * @param {string} attributeName + * @returns {boolean} true if attribute was removed and false otherwise + */ + removeAttribute(attributeName: string): boolean; + /** + * Add to client's in memory attributes storage the attributes in 'attributes'. + * + * @param {Attributes} attributes Object with attributes to store + * @returns true if attributes were stored an false otherwise + */ + setAttributes(attributes: Attributes): boolean; + /** + * Return all the attributes stored in client's in memory attributes storage. 
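The attribute-binding methods above are easiest to see side by side. A sketch, assuming a client-side client created elsewhere; the note that stored attributes are merged into evaluations reflects how the JS SDK uses this storage and is not restated in this hunk.

```
declare const client: SplitIO.IClient;

client.setAttributes({ plan: 'premium', registered_at: Date.now() });
client.setAttribute('beta_opt_in', true);

const stored = client.getAttributes();                  // every stored attribute
const treatment = client.getTreatment('new_checkout');  // evaluated using the stored attributes

client.removeAttribute('beta_opt_in');
client.clearAttributes();
```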
+ * + * @returns {Attributes} returns all the stored attributes + */ + getAttributes(): Attributes; + /** + * Remove all the stored attributes in the client's in memory attribute storage. + * + * @returns {boolean} true if all attribute were removed and false otherwise + */ + clearAttributes(): boolean; + } /** - * This represents the interface for the Client instance with synchronous storage for client-side SDK, where each client has associated a key. + * This represents the interface for the Client instance on client-side, where the user key is bound to the instance on creation and does not need to be provided on each method call. + * * @interface IClient - * @extends IBasicClient + * @extends IClientWithAttributes */ - interface IClient extends IBasicClient { + interface IClient extends IClientWithAttributes { /** * Returns a Treatment value, which is the treatment string for the given feature. + * * @function getTreatment * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1277,6 +2075,7 @@ declare namespace SplitIO { getTreatment(featureFlagName: string, attributes?: Attributes): Treatment; /** * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. + * * @function getTreatmentWithConfig * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1285,6 +2084,7 @@ declare namespace SplitIO { getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the given features. + * * @function getTreatments * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1293,6 +2093,7 @@ declare namespace SplitIO { getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * * @function getTreatmentsWithConfig * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1301,6 +2102,7 @@ declare namespace SplitIO { getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * * @function getTreatmentsByFlagSet * @param {string} flagSet - The flag set name we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1309,6 +2111,7 @@ declare namespace SplitIO { getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. 
+ * * @function getTreatmentsWithConfigByFlagSet * @param {string} flagSet - The flag set name we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1317,6 +2120,7 @@ declare namespace SplitIO { getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. + * * @function getTreatmentsByFlagSets * @param {Array} flagSets - An array of the flag set names we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1325,6 +2129,7 @@ declare namespace SplitIO { getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. + * * @function getTreatmentsWithConfigByFlagSets * @param {Array} flagSets - An array of the flag set names we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1333,64 +2138,25 @@ declare namespace SplitIO { getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface. + * * @function track - * @param {string} trafficType - The traffic type of the entity related to this event. + * @param {string} trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} * @param {string} eventType - The event type corresponding to this event. * @param {number=} value - The value of this event. * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. * @returns {boolean} Whether the event was added to the queue successfully or not. */ track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; - /** - * Add an attribute to client's in memory attributes storage. - * - * @param {string} attributeName Attribute name - * @param {AttributeType} attributeValue Attribute value - * @returns {boolean} true if the attribute was stored and false otherwise - */ - setAttribute(attributeName: string, attributeValue: AttributeType): boolean; - /** - * Returns the attribute with the given name. - * - * @param {string} attributeName Attribute name - * @returns {AttributeType} Attribute with the given name - */ - getAttribute(attributeName: string): AttributeType; - /** - * Removes from client's in memory attributes storage the attribute with the given name. - * - * @param {string} attributeName - * @returns {boolean} true if attribute was removed and false otherwise - */ - removeAttribute(attributeName: string): boolean; - /** - * Add to client's in memory attributes storage the attributes in 'attributes'. - * - * @param {Attributes} attributes Object with attributes to store - * @returns true if attributes were stored an false otherwise - */ - setAttributes(attributes: Attributes): boolean; - /** - * Return all the attributes stored in client's in memory attributes storage. 
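A client-side sketch of the flag-set evaluations and the track call documented above; the flag set names, traffic type and event properties are placeholders.

```
declare const client: SplitIO.IClient;

// Evaluate every flag associated with one flag set, or with several at once.
const bySet = client.getTreatmentsByFlagSet('checkout');
const bySets = client.getTreatmentsWithConfigByFlagSets(['checkout', 'onboarding'], { plan: 'premium' });

// track() returns a boolean indicating whether the event was queued.
const queued = client.track('user', 'page_view', 1, { page: 'home' });
```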
- * - * @returns {Attributes} returns all the stored attributes - */ - getAttributes(): Attributes; - /** - * Remove all the stored attributes in the client's in memory attribute storage. - * - * @returns {boolean} true if all attribute were removed and false otherwise - */ - clearAttributes(): boolean; } /** * This represents the interface for the Client instance with asynchronous storage for client-side SDK, where each client has associated a key. * @interface IAsyncClient - * @extends IBasicClient + * @extends IClientWithAttributes */ - interface IAsyncClient extends IBasicClient { + interface IAsyncClient extends IClientWithAttributes { /** * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. + * * @function getTreatment * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1399,6 +2165,7 @@ declare namespace SplitIO { getTreatment(featureFlagName: string, attributes?: Attributes): AsyncTreatment; /** * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. + * * @function getTreatmentWithConfig * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1407,6 +2174,7 @@ declare namespace SplitIO { getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig; /** * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. + * * @function getTreatments * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1415,6 +2183,7 @@ declare namespace SplitIO { getTreatments(featureFlagNames: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * * @function getTreatmentsWithConfig * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1423,6 +2192,7 @@ declare namespace SplitIO { getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * * @function getTreatmentsByFlagSet * @param {string} flagSet - The flag set name we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1431,6 +2201,7 @@ declare namespace SplitIO { getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. 
+ * * @function getTreatmentsWithConfigByFlagSet * @param {string} flagSet - The flag set name we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1439,6 +2210,7 @@ declare namespace SplitIO { getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. + * * @function getTreatmentsByFlagSets * @param {Array} flagSets - An array of the flag set names we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1447,6 +2219,7 @@ declare namespace SplitIO { getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. + * * @function getTreatmentsWithConfigByFlagSets * @param {Array} flagSets - An array of the flag set names we want to get the treatments. * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. @@ -1455,6 +2228,7 @@ declare namespace SplitIO { getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). + * * @function track * @param {string} trafficType - The traffic type of the entity related to this event. * @param {string} eventType - The event type corresponding to this event. @@ -1463,47 +2237,6 @@ declare namespace SplitIO { * @returns {boolean} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. */ track(trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; - /** - * Add an attribute to client's in memory attributes storage. - * - * @param {string} attributeName Attribute name - * @param {AttributeType} attributeValue Attribute value - * @returns {boolean} true if the attribute was stored and false otherwise - */ - setAttribute(attributeName: string, attributeValue: AttributeType): boolean; - /** - * Returns the attribute with the given name. - * - * @param {string} attributeName Attribute name - * @returns {AttributeType} Attribute with the given name - */ - getAttribute(attributeName: string): AttributeType; - /** - * Removes from client's in memory attributes storage the attribute with the given name. - * - * @param {string} attributeName - * @returns {boolean} true if attribute was removed and false otherwise - */ - removeAttribute(attributeName: string): boolean; - /** - * Add to client's in memory attributes storage the attributes in 'attributes'. - * - * @param {Attributes} attributes Object with attributes to store - * @returns true if attributes were stored an false otherwise - */ - setAttributes(attributes: Attributes): boolean; - /** - * Return all the attributes stored in client's in memory attributes storage. - * - * @returns {Attributes} returns all the stored attributes - */ - getAttributes(): Attributes; - /** - * Remove all the stored attributes in the client's in memory attribute storage. 
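With an asynchronous storage every IAsyncClient call resolves to a promise, so the same evaluations read like this sketch (names are placeholders).

```
declare const asyncClient: SplitIO.IAsyncClient;

async function evaluate() {
  const treatment = await asyncClient.getTreatment('new_checkout');
  const bySet = await asyncClient.getTreatmentsByFlagSet('checkout');

  // track() resolves to a boolean indicating whether the event was queued.
  const queued = await asyncClient.track('user', 'page_view', 1, { page: 'home' });
}
```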
- * - * @returns {boolean} true if all attribute were removed and false otherwise - */ - clearAttributes(): boolean; } /** * Representation of a manager instance with synchronous storage of the SDK. @@ -1524,12 +2257,12 @@ declare namespace SplitIO { */ splits(): SplitViews; /** - * Get the data of a split in SplitView format. + * Get the data of a feature flag in SplitView format. * @function split * @param {string} featureFlagName The name of the feature flag we want to get info of. - * @returns {SplitView} The SplitIO.SplitView of the given split. + * @returns {SplitView | null} The SplitIO.SplitView of the given feature flag name or null if the feature flag is not found. */ - split(featureFlagName: string): SplitView; + split(featureFlagName: string): SplitView | null; } /** * Representation of a manager instance with asynchronous storage of the SDK. @@ -1550,7 +2283,7 @@ declare namespace SplitIO { */ splits(): SplitViewsAsync; /** - * Get the data of a split in SplitView format. + * Get the data of a feature flag in SplitView format. * @function split * @param {string} featureFlagName The name of the feature flag we want to get info of. * @returns {SplitViewAsync} A promise that resolves to the SplitIO.SplitView value. From 901140aaf4e27540b38162990554f771bbd666e6 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 25 Oct 2024 17:57:18 -0300 Subject: [PATCH 126/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 73ffb070..da662c9d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.3", + "version": "2.0.0-rc.4", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.3", + "version": "2.0.0-rc.4", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index 9ab651a3..0d4dadd0 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.3", + "version": "2.0.0-rc.4", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From d383fc589454771179192af1d1d863f46db8d86b Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 25 Oct 2024 18:04:24 -0300 Subject: [PATCH 127/146] Formatting --- types/splitio.d.ts | 160 ++++++++++++++++++++++----------------------- 1 file changed, 80 insertions(+), 80 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index f1a4b3fb..9a965408 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1077,21 +1077,21 @@ declare namespace SplitIO { * @property {boolean | LogLevel} debug * @default false */ - debug?: boolean | LogLevel, + debug?: boolean | LogLevel; /** * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, * which will check for the logImpression method. * @property {IImpressionListener} impressionListener * @default undefined */ - impressionListener?: SplitIO.IImpressionListener, + impressionListener?: SplitIO.IImpressionListener; /** * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. 
* @property {boolean} streamingEnabled * @default true */ - streamingEnabled?: boolean, + streamingEnabled?: boolean; /** * SDK synchronization settings. * @property {Object} sync @@ -1107,7 +1107,7 @@ declare namespace SplitIO { * ]` * @property {SplitIO.SplitFilter[]} splitFilters */ - splitFilters?: SplitIO.SplitFilter[] + splitFilters?: SplitIO.SplitFilter[]; /** * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. @@ -1118,7 +1118,7 @@ declare namespace SplitIO { * @property {string} impressionsMode * @default 'OPTIMIZED' */ - impressionsMode?: SplitIO.ImpressionsMode, + impressionsMode?: SplitIO.ImpressionsMode; /** * Controls the SDK continuous synchronization flags. * @@ -1128,8 +1128,8 @@ declare namespace SplitIO { * @property {boolean} enabled * @default true */ - enabled?: boolean - } + enabled?: boolean; + }; } /** * Common settings interface for SDK instances on NodeJS. @@ -1147,19 +1147,19 @@ declare namespace SplitIO { * @property {number} readyTimeout * @default 15 */ - readyTimeout?: number, + readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady * @default 15 */ - requestTimeoutBeforeReady?: number, + requestTimeoutBeforeReady?: number; /** * How many quick retries we will do while starting up the SDK. * @property {number} retriesOnFailureBeforeReady * @default 1 */ - retriesOnFailureBeforeReady?: number, + retriesOnFailureBeforeReady?: number; /** * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, * to better control on browsers. This number defines that window before the first events push. @@ -1167,8 +1167,8 @@ declare namespace SplitIO { * @property {number} eventsFirstPushWindow * @default 0 */ - eventsFirstPushWindow?: number, - }, + eventsFirstPushWindow?: number; + }; /** * SDK scheduler settings. * @property {Object} scheduler @@ -1179,67 +1179,67 @@ declare namespace SplitIO { * @property {number} featuresRefreshRate * @default 60 */ - featuresRefreshRate?: number, + featuresRefreshRate?: number; /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. * @property {number} impressionsRefreshRate * @default 300 */ - impressionsRefreshRate?: number, + impressionsRefreshRate?: number; /** * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. * @property {number} impressionsQueueSize * @default 30000 */ - impressionsQueueSize?: number, + impressionsQueueSize?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. * @property {number} metricsRefreshRate * @default 120 * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. */ - metricsRefreshRate?: number, + metricsRefreshRate?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. 
* @property {number} telemetryRefreshRate * @default 3600 */ - telemetryRefreshRate?: number, + telemetryRefreshRate?: number; /** * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. * @property {number} segmentsRefreshRate * @default 60 */ - segmentsRefreshRate?: number, + segmentsRefreshRate?: number; /** * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. * @property {number} eventsPushRate * @default 60 */ - eventsPushRate?: number, + eventsPushRate?: number; /** * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. * @property {number} eventsQueueSize * @default 500 */ - eventsQueueSize?: number, + eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} * @property {number} offlineRefreshRate * @default 15 */ - offlineRefreshRate?: number + offlineRefreshRate?: number; /** * When using streaming mode, seconds to wait before re attempting to connect for push notifications. * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... * @property {number} pushRetryBackoffBase * @default 1 */ - pushRetryBackoffBase?: number, - }, + pushRetryBackoffBase?: number; + }; /** * SDK Core settings for NodeJS. * @property {Object} core @@ -1250,20 +1250,20 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} * @property {string} authorizationKey */ - authorizationKey: string, + authorizationKey: string; /** * Disable labels from being sent to Split backend. Labels may contain sensitive information. * @property {boolean} labelsEnabled * @default true */ - labelsEnabled?: boolean + labelsEnabled?: boolean; /** * Disable machine IP and Name from being sent to Split backend. * @property {boolean} IPAddressesEnabled * @default true */ - IPAddressesEnabled?: boolean - }, + IPAddressesEnabled?: boolean; + }; /** * Defines which kind of storage we should instantiate. * @property {Object} storage @@ -1274,33 +1274,33 @@ declare namespace SplitIO { * @property {StorageType} type * @default 'MEMORY' */ - type?: StorageType, + type?: StorageType; /** * Options to be passed to the selected storage. * @property {Object} options */ - options?: Object, + options?: Object; /** * Optional prefix to prevent any kind of data collision between SDK versions. * @property {string} prefix * @default 'SPLITIO' */ - prefix?: string - }, + prefix?: string; + }; /** * The SDK mode. Possible values are "standalone", which is the default when using a synchronous storage, like 'MEMORY' and 'LOCALSTORAGE', * and "consumer", which must be set when using an asynchronous storage, like 'REDIS'. For "localhost" mode, use "localhost" as authorizationKey. * @property {SDKMode} mode * @default 'standalone' */ - mode?: SDKMode, + mode?: SDKMode; /** * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. 
* @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} * @property {MockedFeaturesFilePath} features * @default '$HOME/.split' */ - features?: SplitIO.MockedFeaturesFilePath, + features?: SplitIO.MockedFeaturesFilePath; } /** * Settings interface for SDK instances created on the browser @@ -1319,19 +1319,19 @@ declare namespace SplitIO { * @property {number} readyTimeout * @default 1.5 */ - readyTimeout?: number, + readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady * @default 1.5 */ - requestTimeoutBeforeReady?: number, + requestTimeoutBeforeReady?: number; /** * How many quick retries we will do while starting up the SDK. * @property {number} retriesOnFailureBeforeReady * @default 1 */ - retriesOnFailureBeforeReady?: number, + retriesOnFailureBeforeReady?: number; /** * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, * to better control on browsers. This number defines that window before the first events push. @@ -1339,8 +1339,8 @@ declare namespace SplitIO { * @property {number} eventsFirstPushWindow * @default 10 */ - eventsFirstPushWindow?: number, - }, + eventsFirstPushWindow?: number; + }; /** * SDK scheduler settings. * @property {Object} scheduler @@ -1351,67 +1351,67 @@ declare namespace SplitIO { * @property {number} featuresRefreshRate * @default 60 */ - featuresRefreshRate?: number, + featuresRefreshRate?: number; /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. * @property {number} impressionsRefreshRate * @default 60 */ - impressionsRefreshRate?: number, + impressionsRefreshRate?: number; /** * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. * @property {number} impressionsQueueSize * @default 30000 */ - impressionsQueueSize?: number, + impressionsQueueSize?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. * @property {number} metricsRefreshRate * @default 120 * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. */ - metricsRefreshRate?: number, + metricsRefreshRate?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. * @property {number} telemetryRefreshRate * @default 3600 */ - telemetryRefreshRate?: number, + telemetryRefreshRate?: number; /** * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. * @property {number} segmentsRefreshRate * @default 60 */ - segmentsRefreshRate?: number, + segmentsRefreshRate?: number; /** * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. * @property {number} eventsPushRate * @default 60 */ - eventsPushRate?: number, + eventsPushRate?: number; /** * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. 
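As a sketch of how the queue-related knobs above interact in a browser configuration (illustrative values; a queue size of 0 removes the cap so only the push timer flushes events):

import SplitIO from '../../types/splitio';

const browserConfig: SplitIO.IBrowserSettings = {
  core: {
    authorizationKey: '<YOUR-SDK-KEY>',
    key: 'user-123' // customer identifier, required on the browser
  },
  scheduler: {
    eventsPushRate: 30, // post queued events every 30 seconds instead of the default 60
    eventsQueueSize: 0  // no maximum queue size; flushes happen only on the timer
  }
};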
* @property {number} eventsQueueSize * @default 500 */ - eventsQueueSize?: number, + eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} * @property {number} offlineRefreshRate * @default 15 */ - offlineRefreshRate?: number, + offlineRefreshRate?: number; /** * When using streaming mode, seconds to wait before re attempting to connect for push notifications. * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... * @property {number} pushRetryBackoffBase * @default 1 */ - pushRetryBackoffBase?: number, - }, + pushRetryBackoffBase?: number; + }; /** * SDK Core settings for the browser. * @property {Object} core @@ -1422,25 +1422,25 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} * @property {string} authorizationKey */ - authorizationKey: string, + authorizationKey: string; /** * Customer identifier. Whatever this means to you. * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} * @property {SplitKey} key */ - key: SplitKey, + key: SplitKey; /** * Disable labels from being sent to Split backend. Labels may contain sensitive information. * @property {boolean} labelsEnabled * @default true */ labelsEnabled?: boolean - }, + }; /** * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} */ - features?: MockedFeaturesMap, + features?: MockedFeaturesMap; /** * Defines which kind of storage we can instantiate on the browser. * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. @@ -1452,20 +1452,20 @@ declare namespace SplitIO { * @property {BrowserStorage} type * @default 'MEMORY' */ - type?: BrowserStorage, + type?: BrowserStorage; /** * Optional prefix to prevent any kind of data collision between SDK versions. * @property {string} prefix * @default 'SPLITIO' */ - prefix?: string - }, + prefix?: string; + }; /** * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. * @property {Object} urls */ - urls?: UrlSettings, + urls?: UrlSettings; /** * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. @@ -1476,7 +1476,7 @@ declare namespace SplitIO { * @typedef {string} userConsent * @default 'GRANTED' */ - userConsent?: ConsentStatus, + userConsent?: ConsentStatus; sync?: ISharedSettings['sync'] & { /** * Custom options object for HTTP(S) requests in the Browser. @@ -1507,9 +1507,9 @@ declare namespace SplitIO { * }; * }; */ - getHeaderOverrides?: (context: { headers: Record }) => Record - }, - } + getHeaderOverrides?: (context: { headers: Record }) => Record; + }; + }; } /** * Settings interface for SDK instances created on NodeJS. @@ -1524,7 +1524,7 @@ declare namespace SplitIO { * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. 
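The storage and consent options above combine the same way; a sketch for a browser instance that persists rollout data and defers consent (values are illustrative):

import SplitIO from '../../types/splitio';

const config: SplitIO.IBrowserSettings = {
  core: {
    authorizationKey: '<YOUR-SDK-KEY>',
    key: 'user-123'
  },
  storage: {
    type: 'LOCALSTORAGE', // persist rollout data in the browser instead of the default 'MEMORY'
    prefix: 'MYAPP'       // optional prefix to avoid data collisions
  },
  userConsent: 'UNKNOWN'  // track events/impressions locally until consent is granted or declined
};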
* @property {Object} urls */ - urls?: UrlSettings, + urls?: UrlSettings; /** * Defines which kind of storage we can instantiate on NodeJS for 'standalone' mode. * The only possible storage type is 'MEMORY', which is the default. @@ -1536,14 +1536,14 @@ declare namespace SplitIO { * @property {NodeSyncStorage} type * @default 'MEMORY' */ - type?: NodeSyncStorage, + type?: NodeSyncStorage; /** * Optional prefix to prevent any kind of data collision between SDK versions. * @property {string} prefix * @default 'SPLITIO' */ prefix?: string - }, + }; /** * The SDK mode. When using the default 'MEMORY' storage, the only possible value is "standalone", which is the default. * For "localhost" mode, use "localhost" as authorizationKey. @@ -1606,9 +1606,9 @@ declare namespace SplitIO { * @property {http.Agent | https.Agent} agent * @default undefined */ - agent?: RequestOptions["agent"] - }, - } + agent?: RequestOptions['agent']; + }; + }; } /** * Settings interface with async storage for SDK instances created on NodeJS. @@ -1628,7 +1628,7 @@ declare namespace SplitIO { * 'REDIS' storage type to be instantiated by the SDK. * @property {NodeAsyncStorage} type */ - type: NodeAsyncStorage, + type: NodeAsyncStorage; /** * Options to be passed to the Redis storage. Use it with storage type: 'REDIS'. * @property {Object} options @@ -1645,44 +1645,44 @@ declare namespace SplitIO { * ``` * @property {string=} url */ - url?: string, + url?: string; /** * Redis host. * @property {string=} host * @default 'localhost' */ - host?: string, + host?: string; /** * Redis port. * @property {number=} port * @default 6379 */ - port?: number, + port?: number; /** * Redis database to be used. * @property {number=} db * @default 0 */ - db?: number, + db?: number; /** * Redis password. Don't define if no password is used. * @property {string=} pass * @default undefined */ - pass?: string, + pass?: string; /** * The milliseconds before a timeout occurs during the initial connection to the Redis server. * @property {number=} connectionTimeout * @default 10000 */ - connectionTimeout?: number, + connectionTimeout?: number; /** * The milliseconds before Redis commands are timeout by the SDK. * Method calls that involve Redis commands, like `client.getTreatment` or `client.track` calls, are resolved when the commands success or timeout. * @property {number=} operationTimeout * @default 5000 */ - operationTimeout?: number, + operationTimeout?: number; /** * TLS configuration for Redis connection. * @see {@link https://www.npmjs.com/package/ioredis#tls-options } @@ -1690,15 +1690,15 @@ declare namespace SplitIO { * @property {Object=} tls * @default undefined */ - tls?: RedisOptions['tls'], - }, + tls?: RedisOptions['tls']; + }; /** * Optional prefix to prevent any kind of data collision between SDK versions. * @property {string} prefix * @default 'SPLITIO' */ - prefix?: string - }, + prefix?: string; + }; /** * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. * @@ -1706,7 +1706,7 @@ declare namespace SplitIO { * * @property {'consumer'} mode */ - mode: 'consumer' + mode: 'consumer'; } /** * This represents the interface for the SDK instance with synchronous storage and client-side API, From 61580e51323de4d54de685cc127206a1c002ee61 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Sat, 26 Oct 2024 13:01:20 -0300 Subject: [PATCH 128/146] Add IReactNativeSettings to specialize the type. 
Fixed some defaults --- CHANGES.txt | 1 + types/splitio.d.ts | 43 ++++++++++++++++++++++++++++--------------- 2 files changed, 29 insertions(+), 15 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index c1852500..63b7cbfc 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,6 +1,7 @@ 2.0.0 (October XX, 2024) - Added support for targeting rules based on large segments. - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. + - Added `SplitIO` namespace with the public TypeScript definitions to be reused by the SDKs, rather than having each SDK define its own types. - Updated the handling of timers and async operations inside an `init` factory method to enable lazy initialization of the SDK in standalone mode. This update is intended for the React SDK. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. - BREAKING CHANGES: diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 9a965408..d8ebe166 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -835,7 +835,7 @@ declare namespace SplitIO { userConsent?: ConsentStatus; } /** - * Settings interface for SDK instances created for client-side with synchronous storage (e.g., Browser or React Native). + * Settings interface for SDK instances created with client-side API and synchronous storage (e.g., Browser or React Native). * * @interface IClientSideSettings * @extends IClientSideBasicSettings @@ -878,13 +878,13 @@ declare namespace SplitIO { /** * Maximum amount of time used before notify a timeout. * @property {number} readyTimeout - * @default 1.5 + * @default 10 */ readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady - * @default 1.5 + * @default 5 */ requestTimeoutBeforeReady?: number; /** @@ -968,10 +968,20 @@ declare namespace SplitIO { } } /** - * Settings interface with async storage for SDK instances created for client-side (e.g., Serverless environments). - * If your storage is synchronous (by defaut we use memory, which is sync) use SplitIO.IClientSideSettings instead. + * Settings interface for SDK instances created in React Native, with client-side API and synchronous storage. + * + * @interface IReactNativeSettings + * @extends IClientSideSettings + * @see {@link https://help.split.io/hc/en-us/articles/4406066357901-React-Native-SDK#configuration} + */ + interface IReactNativeSettings extends IClientSideSettings { } + /** + * Settings interface for SDK instances created with client-side API and asynchronous storage (e.g., serverless environments with a persistent storage). + * If your storage is synchronous (by default we use memory, which is sync) use SplitIO.IClientSideSettings instead. + * * @interface IClientSideAsyncSettings * @extends IClientSideBasicSettings + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage} */ interface IClientSideAsyncSettings extends IClientSideBasicSettings { /** @@ -1003,7 +1013,7 @@ declare namespace SplitIO { /** * Maximum amount of time used before notify a timeout. 
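Because `IReactNativeSettings` adds no members of its own, it only specializes the name of the client-side settings; a React Native configuration can be typed against it directly, for example (sketch, illustrative values):

import SplitIO from '../../types/splitio';

const rnConfig: SplitIO.IReactNativeSettings = {
  core: {
    authorizationKey: '<YOUR-SDK-KEY>',
    key: 'user-123'
  },
  startup: {
    readyTimeout: 10 // explicit here only for illustration; 10 is now the default per this patch
  }
};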
* @property {number} readyTimeout - * @default 1.5 + * @default 5 */ readyTimeout?: number; /** @@ -1303,7 +1313,8 @@ declare namespace SplitIO { features?: SplitIO.MockedFeaturesFilePath; } /** - * Settings interface for SDK instances created on the browser + * Settings interface for JavaScript SDK instances created on the browser, with client-side API and synchronous storage. + * * @interface IBrowserSettings * @extends ISharedSettings * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} @@ -1317,13 +1328,13 @@ declare namespace SplitIO { /** * Maximum amount of time used before notify a timeout. * @property {number} readyTimeout - * @default 1.5 + * @default 10 */ readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady - * @default 1.5 + * @default 5 */ requestTimeoutBeforeReady?: number; /** @@ -1512,8 +1523,9 @@ declare namespace SplitIO { }; } /** - * Settings interface for SDK instances created on NodeJS. + * Settings interface for JavaScript SDK instances created on NodeJS, with server-side API and synchronous storage. * If your storage is asynchronous (Redis for example) use SplitIO.INodeAsyncSettings instead. + * * @interface INodeSettings * @extends INodeBasicSettings * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} @@ -1542,7 +1554,7 @@ declare namespace SplitIO { * @property {string} prefix * @default 'SPLITIO' */ - prefix?: string + prefix?: string; }; /** * The SDK mode. When using the default 'MEMORY' storage, the only possible value is "standalone", which is the default. @@ -1551,7 +1563,7 @@ declare namespace SplitIO { * @property {'standalone'} mode * @default 'standalone' */ - mode?: 'standalone' + mode?: 'standalone'; sync?: INodeBasicSettings['sync'] & { /** * Custom options object for HTTP(S) requests in NodeJS. @@ -1580,7 +1592,7 @@ declare namespace SplitIO { * }; * }; */ - getHeaderOverrides?: (context: { headers: Record }) => Record + getHeaderOverrides?: (context: { headers: Record }) => Record; /** * Custom NodeJS HTTP(S) Agent used by the SDK for HTTP(S) requests. * @@ -1611,8 +1623,9 @@ declare namespace SplitIO { }; } /** - * Settings interface with async storage for SDK instances created on NodeJS. - * If your storage is synchronous (by defaut we use memory, which is sync) use SplitIO.INodeSettings instead. + * Settings interface for JavaScript SDK instances created on NodeJS, with asynchronous storage like Redis. + * If your storage is synchronous (by default we use memory, which is sync) use SplitIO.INodeSettings instead. + * * @interface INodeAsyncSettings * @extends INodeBasicSettings * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} From 49d76b2a522185663cec4419e68f84ca080ece27 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 28 Oct 2024 16:30:31 -0300 Subject: [PATCH 129/146] Polishing --- types/splitio.d.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index d8ebe166..de3db1ee 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -123,10 +123,11 @@ interface ISettings { getHeaderOverrides?: (context: { headers: Record }) => Record; }; }; + readonly impressionListener?: SplitIO.IImpressionListener; /** * User consent status if using in client-side. 
Undefined if using in server-side (NodeJS). */ - readonly userConsent?: SplitIO.ConsentStatus + readonly userConsent?: SplitIO.ConsentStatus; } /** * Log levels. From b09832f1f099e345c5ed772791671d49961209ba Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 28 Oct 2024 21:37:24 -0300 Subject: [PATCH 130/146] Remove SplitIO namespace from 'src/types.ts' and reuse it from 'types/splitio.d.ts' --- src/__tests__/mocks/fetchSpecificSplits.ts | 2 +- src/__tests__/testUtils/eventSourceMock.ts | 4 +- src/dtos/types.ts | 2 +- src/evaluator/Engine.ts | 2 +- src/evaluator/combiners/ifelseif.ts | 2 +- src/evaluator/condition/index.ts | 2 +- src/evaluator/index.ts | 2 +- src/evaluator/parser/index.ts | 2 +- src/evaluator/types.ts | 2 +- src/evaluator/value/index.ts | 2 +- src/evaluator/value/sanitize.ts | 2 +- src/integrations/pluggable.ts | 2 +- src/integrations/types.ts | 5 +- src/listeners/browser.ts | 5 +- src/logger/__tests__/index.spec.ts | 6 +- src/logger/__tests__/sdkLogger.mock.ts | 4 +- src/logger/index.ts | 12 +- src/logger/sdkLogger.ts | 4 +- src/logger/types.ts | 24 +- .../__tests__/sdkReadinessManager.spec.ts | 4 +- src/readiness/readinessManager.ts | 9 +- src/readiness/sdkReadinessManager.ts | 5 +- src/readiness/types.ts | 9 +- .../__tests__/sdkClientMethod.spec.ts | 3 +- src/sdkClient/client.ts | 15 +- src/sdkClient/clientAttributesDecoration.ts | 4 +- src/sdkClient/clientCS.ts | 6 +- src/sdkClient/clientInputValidation.ts | 5 +- src/sdkClient/identity.ts | 2 +- src/sdkClient/sdkClient.ts | 6 +- src/sdkClient/sdkClientMethod.ts | 4 +- src/sdkClient/sdkClientMethodCS.ts | 10 +- src/sdkFactory/__tests__/index.spec.ts | 4 +- src/sdkFactory/index.ts | 6 +- src/sdkFactory/types.ts | 13 +- .../__tests__/index.asyncCache.spec.ts | 2 +- src/sdkManager/index.ts | 3 +- src/storages/dataLoader.ts | 4 +- .../inMemory/AttributesCacheInMemory.ts | 2 +- src/storages/inMemory/EventsCacheInMemory.ts | 2 +- .../inMemory/ImpressionsCacheInMemory.ts | 8 +- src/storages/inRedis/EventsCacheInRedis.ts | 2 +- .../inRedis/ImpressionsCacheInRedis.ts | 4 +- .../pluggable/EventsCachePluggable.ts | 2 +- .../pluggable/ImpressionsCachePluggable.ts | 4 +- src/storages/types.ts | 20 +- src/storages/utils.ts | 5 +- .../splitsParserFromSettings.spec.ts | 2 +- .../splitsParser/splitsParserFromSettings.ts | 4 +- src/sync/streaming/types.ts | 4 +- src/sync/submitters/impressionsSubmitter.ts | 4 +- src/sync/submitters/telemetrySubmitter.ts | 7 +- src/sync/submitters/types.ts | 2 +- src/trackers/__tests__/eventTracker.spec.ts | 2 +- .../__tests__/impressionsTracker.spec.ts | 18 +- src/trackers/eventTracker.ts | 3 +- .../impressionObserver/ImpressionObserver.ts | 8 +- .../impressionObserver/__tests__/testUtils.ts | 4 +- src/trackers/impressionObserver/buildKey.ts | 4 +- .../impressionObserverCS.ts | 4 +- .../impressionObserverSS.ts | 4 +- src/trackers/impressionObserver/types.ts | 4 +- src/trackers/impressionsTracker.ts | 5 +- src/trackers/strategy/__tests__/testUtils.ts | 6 +- src/trackers/strategy/strategyDebug.ts | 4 +- src/trackers/strategy/strategyNone.ts | 4 +- src/trackers/strategy/strategyOptimized.ts | 6 +- src/trackers/types.ts | 10 +- src/types.ts | 1393 +---------------- src/utils/MinEventEmitter.ts | 5 +- src/utils/MinEvents.ts | 4 +- src/utils/constants/index.ts | 19 +- src/utils/inputValidation/attributes.ts | 2 +- src/utils/inputValidation/eventProperties.ts | 2 +- src/utils/inputValidation/key.ts | 2 +- src/utils/inputValidation/preloadedData.ts | 3 +- .../inputValidation/trafficTypeExistence.ts | 4 
+- src/utils/key/index.ts | 2 +- src/utils/settingsValidation/consent.ts | 4 +- .../settingsValidation/impressionsMode.ts | 2 +- .../logger/builtinLogger.ts | 4 +- .../settingsValidation/logger/commons.ts | 4 +- .../logger/pluggableLogger.ts | 4 +- src/utils/settingsValidation/splitFilters.ts | 2 +- .../settingsValidation/storage/storageCS.ts | 5 +- types/splitio.d.ts | 531 +++---- 86 files changed, 525 insertions(+), 1825 deletions(-) diff --git a/src/__tests__/mocks/fetchSpecificSplits.ts b/src/__tests__/mocks/fetchSpecificSplits.ts index 4b2f4c90..3fd16dfe 100644 --- a/src/__tests__/mocks/fetchSpecificSplits.ts +++ b/src/__tests__/mocks/fetchSpecificSplits.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; const valuesExamples = [ ['\u0223abc', 'abc\u0223asd', 'abc\u0223', 'abcȣ'], diff --git a/src/__tests__/testUtils/eventSourceMock.ts b/src/__tests__/testUtils/eventSourceMock.ts index f47615ed..c231aa9f 100644 --- a/src/__tests__/testUtils/eventSourceMock.ts +++ b/src/__tests__/testUtils/eventSourceMock.ts @@ -13,7 +13,7 @@ */ import { EventEmitter } from '../../utils/MinEvents'; -import { IEventEmitter } from '../../types'; +import SplitIO from '../../../types/splitio'; type ReadyStateType = 0 | 1 | 2; @@ -46,7 +46,7 @@ export default class EventSource { static readonly OPEN: ReadyStateType = 1; static readonly CLOSED: ReadyStateType = 2; - private readonly __emitter: IEventEmitter; + private readonly __emitter: SplitIO.IEventEmitter; private readonly __eventSourceInitDict: EventSourceInitDict; onerror?: (evt: MessageEvent) => any; onmessage?: (evt: MessageEvent) => any; diff --git a/src/dtos/types.ts b/src/dtos/types.ts index f23bd372..dce1d12d 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; export type MaybeThenable = T | Promise diff --git a/src/evaluator/Engine.ts b/src/evaluator/Engine.ts index 89f4ef69..36f52cb4 100644 --- a/src/evaluator/Engine.ts +++ b/src/evaluator/Engine.ts @@ -5,7 +5,7 @@ import { thenable } from '../utils/promise/thenable'; import { EXCEPTION, NO_CONDITION_MATCH, SPLIT_ARCHIVED, SPLIT_KILLED } from '../utils/labels'; import { CONTROL } from '../utils/constants'; import { ISplit, MaybeThenable } from '../dtos/types'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { IStorageAsync, IStorageSync } from '../storages/types'; import { IEvaluation, IEvaluationResult, IEvaluator, ISplitEvaluator } from './types'; import { ILogger } from '../logger/types'; diff --git a/src/evaluator/combiners/ifelseif.ts b/src/evaluator/combiners/ifelseif.ts index c96df683..68fe5725 100644 --- a/src/evaluator/combiners/ifelseif.ts +++ b/src/evaluator/combiners/ifelseif.ts @@ -3,7 +3,7 @@ import { ILogger } from '../../logger/types'; import { thenable } from '../../utils/promise/thenable'; import { UNSUPPORTED_MATCHER_TYPE } from '../../utils/labels'; import { CONTROL } from '../../utils/constants'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IEvaluation, IEvaluator, ISplitEvaluator } from '../types'; import { ENGINE_COMBINER_IFELSEIF, ENGINE_COMBINER_IFELSEIF_NO_TREATMENT, ERROR_ENGINE_COMBINER_IFELSEIF } from '../../logger/constants'; diff --git a/src/evaluator/condition/index.ts b/src/evaluator/condition/index.ts index 64b42e5f..7ffaef79 100644 --- a/src/evaluator/condition/index.ts +++ b/src/evaluator/condition/index.ts @@ -3,7 
+3,7 @@ import { thenable } from '../../utils/promise/thenable'; import { NOT_IN_SPLIT } from '../../utils/labels'; import { MaybeThenable } from '../../dtos/types'; import { IEvaluation, IEvaluator, ISplitEvaluator } from '../types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; // Build Evaluation object if and only if matchingResult is true diff --git a/src/evaluator/index.ts b/src/evaluator/index.ts index d58c7f5d..c0576019 100644 --- a/src/evaluator/index.ts +++ b/src/evaluator/index.ts @@ -5,7 +5,7 @@ import { CONTROL } from '../utils/constants'; import { ISplit, MaybeThenable } from '../dtos/types'; import { IStorageAsync, IStorageSync } from '../storages/types'; import { IEvaluationResult } from './types'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { ILogger } from '../logger/types'; import { returnSetsUnion, setToArray } from '../utils/lang/sets'; import { WARN_FLAGSET_WITHOUT_FLAGS } from '../logger/constants'; diff --git a/src/evaluator/parser/index.ts b/src/evaluator/parser/index.ts index d124ad41..a398aa0b 100644 --- a/src/evaluator/parser/index.ts +++ b/src/evaluator/parser/index.ts @@ -9,7 +9,7 @@ import { thenable } from '../../utils/promise/thenable'; import { IEvaluator, IMatcherDto, ISplitEvaluator } from '../types'; import { ISplitCondition, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { ENGINE_MATCHER_ERROR, ENGINE_MATCHER_RESULT } from '../../logger/constants'; diff --git a/src/evaluator/types.ts b/src/evaluator/types.ts index a34c33c7..92e3446d 100644 --- a/src/evaluator/types.ts +++ b/src/evaluator/types.ts @@ -1,6 +1,6 @@ import { IBetweenMatcherData, IBetweenStringMatcherData, IDependencyMatcherData, MaybeThenable } from '../dtos/types'; import { IStorageAsync, IStorageSync } from '../storages/types'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { ILogger } from '../logger/types'; export interface IDependencyMatcherValue { diff --git a/src/evaluator/value/index.ts b/src/evaluator/value/index.ts index c564a68f..95b4000c 100644 --- a/src/evaluator/value/index.ts +++ b/src/evaluator/value/index.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IMatcherDto } from '../types'; import { ILogger } from '../../logger/types'; import { sanitize } from './sanitize'; diff --git a/src/evaluator/value/sanitize.ts b/src/evaluator/value/sanitize.ts index d12de8ed..9fbf74f7 100644 --- a/src/evaluator/value/sanitize.ts +++ b/src/evaluator/value/sanitize.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IDependencyMatcherValue } from '../types'; import { ILogger } from '../../logger/types'; import { isObject, uniq, toString, toNumber } from '../../utils/lang'; diff --git a/src/integrations/pluggable.ts b/src/integrations/pluggable.ts index df4ccd21..64dce8bb 100644 --- a/src/integrations/pluggable.ts +++ b/src/integrations/pluggable.ts @@ -1,5 +1,5 @@ import { SPLIT_IMPRESSION, SPLIT_EVENT } from '../utils/constants'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { IIntegration, IIntegrationManager, IIntegrationFactoryParams } from './types'; 
/** diff --git a/src/integrations/types.ts b/src/integrations/types.ts index d2ac4fb0..b0059f26 100644 --- a/src/integrations/types.ts +++ b/src/integrations/types.ts @@ -1,6 +1,7 @@ import { IEventsCacheBase } from '../storages/types'; import { IEventsHandler, IImpressionsHandler, ITelemetryTracker } from '../trackers/types'; -import { ISettings, SplitIO } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; export interface IIntegration { queue(data: SplitIO.IntegrationData): void @@ -14,7 +15,7 @@ export interface IIntegrationFactoryParams { telemetryTracker: ITelemetryTracker } -export type IntegrationFactory = { +export type IntegrationFactory = SplitIO.IntegrationFactory & { readonly type: string (params: IIntegrationFactoryParams): IIntegration | void } diff --git a/src/listeners/browser.ts b/src/listeners/browser.ts index 2320879f..12f13b74 100644 --- a/src/listeners/browser.ts +++ b/src/listeners/browser.ts @@ -5,7 +5,8 @@ import { IRecorderCacheSync, IStorageSync } from '../storages/types'; import { fromImpressionsCollector } from '../sync/submitters/impressionsSubmitter'; import { fromImpressionCountsCollector } from '../sync/submitters/impressionCountsSubmitter'; import { IResponse, ISplitApi } from '../services/types'; -import { ImpressionDTO, ISettings } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { ImpressionsPayload } from '../sync/submitters/types'; import { OPTIMIZED, DEBUG, NONE } from '../utils/constants'; import { objectAssign } from '../utils/lang/objectAssign'; @@ -22,7 +23,7 @@ const EVENT_NAME = 'for visibilitychange and pagehide events.'; */ export class BrowserSignalListener implements ISignalListener { - private fromImpressionsCollector: (data: ImpressionDTO[]) => ImpressionsPayload; + private fromImpressionsCollector: (data: SplitIO.ImpressionDTO[]) => ImpressionsPayload; constructor( private syncManager: ISyncManager | undefined, diff --git a/src/logger/__tests__/index.spec.ts b/src/logger/__tests__/index.spec.ts index 754acf0f..3b6e7dec 100644 --- a/src/logger/__tests__/index.spec.ts +++ b/src/logger/__tests__/index.spec.ts @@ -1,4 +1,4 @@ -import { LogLevel } from '../../types'; +import SplitIO from '../../../types/splitio'; import { Logger, LogLevels, isLogLevelString, _sprintf } from '../index'; // We'll set this only once. These are the constants we will use for @@ -36,9 +36,9 @@ test('SPLIT LOGGER / Logger class shape', () => { expect(typeof logger.setLogLevel).toBe('function'); // instance.setLogLevel should be a method. }); -const LOG_LEVELS_IN_ORDER = ['DEBUG', 'INFO', 'WARN', 'ERROR', 'NONE']; +const LOG_LEVELS_IN_ORDER: SplitIO.LogLevel[] = ['DEBUG', 'INFO', 'WARN', 'ERROR', 'NONE']; /* Utility function to avoid repeating too much code */ -function testLogLevels(levelToTest: LogLevel) { +function testLogLevels(levelToTest: SplitIO.LogLevel) { // Builds the expected message. 
const buildExpectedMessage = (lvl: string, category: string, msg: string, showLevel?: boolean) => { let res = ''; diff --git a/src/logger/__tests__/sdkLogger.mock.ts b/src/logger/__tests__/sdkLogger.mock.ts index a2cc184f..b7c4aa25 100644 --- a/src/logger/__tests__/sdkLogger.mock.ts +++ b/src/logger/__tests__/sdkLogger.mock.ts @@ -1,4 +1,4 @@ -import { LogLevel } from '../../types'; +import SplitIO from '../../../types/splitio'; export const loggerMock = { warn: jest.fn(), @@ -16,6 +16,6 @@ export const loggerMock = { } }; -export function getLoggerLogLevel(logger: any): LogLevel | undefined { +export function getLoggerLogLevel(logger: any): SplitIO.LogLevel | undefined { if (logger) return logger.options.logLevel; } diff --git a/src/logger/index.ts b/src/logger/index.ts index f343ba75..662e1f86 100644 --- a/src/logger/index.ts +++ b/src/logger/index.ts @@ -1,9 +1,9 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { ILoggerOptions, ILogger } from './types'; import { find, isObject } from '../utils/lang'; -import { LogLevel } from '../types'; +import SplitIO from '../../types/splitio'; -export const LogLevels: { [level: string]: LogLevel } = { +export const LogLevels: SplitIO.ILoggerAPI['LogLevel'] = { DEBUG: 'DEBUG', INFO: 'INFO', WARN: 'WARN', @@ -19,7 +19,7 @@ const LogLevelIndexes = { NONE: 5 }; -export function isLogLevelString(str: string): str is LogLevel { +export function isLogLevelString(str: string): str is SplitIO.LogLevel { return !!find(LogLevels, (lvl: string) => str === lvl); } @@ -55,7 +55,7 @@ export class Logger implements ILogger { this.logLevel = LogLevelIndexes[this.options.logLevel]; } - setLogLevel(logLevel: LogLevel) { + setLogLevel(logLevel: SplitIO.LogLevel) { this.options.logLevel = logLevel; this.logLevel = LogLevelIndexes[logLevel]; } @@ -76,7 +76,7 @@ export class Logger implements ILogger { if (this._shouldLog(LogLevelIndexes.ERROR)) this._log(LogLevels.ERROR, msg, args); } - private _log(level: LogLevel, msg: string | number, args?: any[]) { + private _log(level: SplitIO.LogLevel, msg: string | number, args?: any[]) { if (typeof msg === 'number') { const format = this.codes.get(msg); msg = format ? _sprintf(format, args) : `Message code ${msg}${args ? 
', with args: ' + args.toString() : ''}`; @@ -89,7 +89,7 @@ export class Logger implements ILogger { console.log(formattedText); } - private _generateLogMessage(level: LogLevel, text: string) { + private _generateLogMessage(level: SplitIO.LogLevel, text: string) { const textPre = ' => '; let result = ''; diff --git a/src/logger/sdkLogger.ts b/src/logger/sdkLogger.ts index 41c3635a..bbe3043e 100644 --- a/src/logger/sdkLogger.ts +++ b/src/logger/sdkLogger.ts @@ -1,5 +1,5 @@ import { LogLevels, isLogLevelString } from './index'; -import { ILoggerAPI } from '../types'; +import SplitIO from '../../types/splitio'; import { ILogger } from './types'; import { ERROR_LOGLEVEL_INVALID } from './constants'; @@ -8,7 +8,7 @@ import { ERROR_LOGLEVEL_INVALID } from './constants'; * * @param log the factory logger instance to handle */ -export function createLoggerAPI(log: ILogger): ILoggerAPI { +export function createLoggerAPI(log: ILogger): SplitIO.ILoggerAPI { function setLogLevel(logLevel: string) { if (isLogLevelString(logLevel)) { diff --git a/src/logger/types.ts b/src/logger/types.ts index 79ec1b07..2f05b3ba 100644 --- a/src/logger/types.ts +++ b/src/logger/types.ts @@ -1,23 +1,21 @@ -import { LogLevel } from '../types'; +import SplitIO from '../../types/splitio'; export interface ILoggerOptions { prefix?: string, - logLevel?: LogLevel, + logLevel?: SplitIO.LogLevel, showLevel?: boolean, // @TODO remove this param eventually since it is not being set `false` anymore } -export interface ILogger { - setLogLevel(logLevel: LogLevel): void +export interface ILogger extends SplitIO.ILogger { + debug(msg: any): void; + debug(msg: string | number, args?: any[]): void; - debug(msg: any): void - debug(msg: string | number, args?: any[]): void + info(msg: any): void; + info(msg: string | number, args?: any[]): void; - info(msg: any): void - info(msg: string | number, args?: any[]): void + warn(msg: any): void; + warn(msg: string | number, args?: any[]): void; - warn(msg: any): void - warn(msg: string | number, args?: any[]): void - - error(msg: any): void - error(msg: string | number, args?: any[]): void + error(msg: any): void; + error(msg: string | number, args?: any[]): void; } diff --git a/src/readiness/__tests__/sdkReadinessManager.spec.ts b/src/readiness/__tests__/sdkReadinessManager.spec.ts index 6d639c84..17d98102 100644 --- a/src/readiness/__tests__/sdkReadinessManager.spec.ts +++ b/src/readiness/__tests__/sdkReadinessManager.spec.ts @@ -1,6 +1,6 @@ // @ts-nocheck import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; -import { IEventEmitter } from '../../types'; +import SplitIO from '../../../types/splitio'; import { SDK_READY, SDK_READY_FROM_CACHE, SDK_READY_TIMED_OUT, SDK_UPDATE } from '../constants'; import { sdkReadinessManagerFactory } from '../sdkReadinessManager'; import { IReadinessManager } from '../types'; @@ -15,7 +15,7 @@ const EventEmitterMock = jest.fn(() => ({ addListener: jest.fn(), off: jest.fn(), removeListener: jest.fn() -})) as new () => IEventEmitter; +})) as new () => SplitIO.IEventEmitter; // Makes readinessManager emit SDK_READY & update isReady flag function emitReadyEvent(readinessManager: IReadinessManager) { diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index bc31152b..1581014c 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -1,9 +1,10 @@ import { objectAssign } from '../utils/lang/objectAssign'; -import { IEventEmitter, ISettings } from '../types'; +import { ISettings } from 
'../types'; +import SplitIO from '../../types/splitio'; import { SDK_SPLITS_ARRIVED, SDK_SPLITS_CACHE_LOADED, SDK_SEGMENTS_ARRIVED, SDK_READY_TIMED_OUT, SDK_READY_FROM_CACHE, SDK_UPDATE, SDK_READY } from './constants'; import { IReadinessEventEmitter, IReadinessManager, ISegmentsEventEmitter, ISplitsEventEmitter } from './types'; -function splitsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISplitsEventEmitter { +function splitsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter): ISplitsEventEmitter { const splitsEventEmitter = objectAssign(new EventEmitter(), { splitsArrived: false, splitsCacheLoaded: false, @@ -20,7 +21,7 @@ function splitsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISpli return splitsEventEmitter; } -function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISegmentsEventEmitter { +function segmentsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter): ISegmentsEventEmitter { const segmentsEventEmitter = objectAssign(new EventEmitter(), { segmentsArrived: false }); @@ -34,7 +35,7 @@ function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISe * Factory of readiness manager, which handles the ready / update event propagation. */ export function readinessManagerFactory( - EventEmitter: new () => IEventEmitter, + EventEmitter: new () => SplitIO.IEventEmitter, settings: ISettings, splits: ISplitsEventEmitter = splitsEventEmitterFactory(EventEmitter)): IReadinessManager { diff --git a/src/readiness/sdkReadinessManager.ts b/src/readiness/sdkReadinessManager.ts index 238297e0..61cabfe6 100644 --- a/src/readiness/sdkReadinessManager.ts +++ b/src/readiness/sdkReadinessManager.ts @@ -2,7 +2,8 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { promiseWrapper } from '../utils/promise/wrapper'; import { readinessManagerFactory } from './readinessManager'; import { ISdkReadinessManager } from './types'; -import { IEventEmitter, ISettings } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { SDK_READY, SDK_READY_TIMED_OUT, SDK_READY_FROM_CACHE, SDK_UPDATE } from './constants'; import { ERROR_CLIENT_LISTENER, CLIENT_READY_FROM_CACHE, CLIENT_READY, CLIENT_NO_LISTENER } from '../logger/constants'; @@ -17,7 +18,7 @@ const REMOVE_LISTENER_EVENT = 'removeListener'; * @param readinessManager optional readinessManager to use. 
only used internally for `shared` method */ export function sdkReadinessManagerFactory( - EventEmitter: new () => IEventEmitter, + EventEmitter: new () => SplitIO.IEventEmitter, settings: ISettings, readinessManager = readinessManagerFactory(EventEmitter, settings)): ISdkReadinessManager { diff --git a/src/readiness/types.ts b/src/readiness/types.ts index ebe22b0f..03886c24 100644 --- a/src/readiness/types.ts +++ b/src/readiness/types.ts @@ -1,4 +1,5 @@ -import { IEventEmitter, IStatusInterface } from '../types'; +import { IStatusInterface } from '../types'; +import SplitIO from '../../types/splitio'; /** Splits data emitter */ @@ -6,7 +7,7 @@ type SDK_SPLITS_ARRIVED = 'state::splits-arrived' type SDK_SPLITS_CACHE_LOADED = 'state::splits-cache-loaded' type ISplitsEvent = SDK_SPLITS_ARRIVED | SDK_SPLITS_CACHE_LOADED -export interface ISplitsEventEmitter extends IEventEmitter { +export interface ISplitsEventEmitter extends SplitIO.IEventEmitter { emit(event: ISplitsEvent, ...args: any[]): boolean on(event: ISplitsEvent, listener: (...args: any[]) => void): this; once(event: ISplitsEvent, listener: (...args: any[]) => void): this; @@ -21,7 +22,7 @@ export interface ISplitsEventEmitter extends IEventEmitter { type SDK_SEGMENTS_ARRIVED = 'state::segments-arrived' type ISegmentsEvent = SDK_SEGMENTS_ARRIVED -export interface ISegmentsEventEmitter extends IEventEmitter { +export interface ISegmentsEventEmitter extends SplitIO.IEventEmitter { emit(event: ISegmentsEvent, ...args: any[]): boolean on(event: ISegmentsEvent, listener: (...args: any[]) => void): this; once(event: ISegmentsEvent, listener: (...args: any[]) => void): this; @@ -37,7 +38,7 @@ export type SDK_UPDATE = 'state::update' export type SDK_DESTROY = 'state::destroy' export type IReadinessEvent = SDK_READY_TIMED_OUT | SDK_READY | SDK_READY_FROM_CACHE | SDK_UPDATE | SDK_DESTROY -export interface IReadinessEventEmitter extends IEventEmitter { +export interface IReadinessEventEmitter extends SplitIO.IEventEmitter { emit(event: IReadinessEvent, ...args: any[]): boolean } diff --git a/src/sdkClient/__tests__/sdkClientMethod.spec.ts b/src/sdkClient/__tests__/sdkClientMethod.spec.ts index 2ae7dff3..068d0278 100644 --- a/src/sdkClient/__tests__/sdkClientMethod.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethod.spec.ts @@ -3,6 +3,7 @@ import { CONSUMER_MODE, STANDALONE_MODE } from '../../utils/constants'; import { sdkClientMethodFactory } from '../sdkClientMethod'; import { assertClientApi } from './testUtils'; import { telemetryTrackerFactory } from '../../trackers/telemetryTracker'; +import { IBasicClient } from '../../types'; const errorMessage = 'Shared Client not supported by the storage mechanism. 
Create isolated instances instead.'; @@ -37,7 +38,7 @@ test.each(paramMocks)('sdkClientMethodFactory', (params, done: any) => { expect(typeof sdkClientMethod).toBe('function'); // calling the function should return a client instance - const client = sdkClientMethod(); + const client = sdkClientMethod() as unknown as IBasicClient; assertClientApi(client, params.sdkReadinessManager.sdkStatus); // multiple calls should return the same instance diff --git a/src/sdkClient/client.ts b/src/sdkClient/client.ts index 98073e9d..0f5eb8a6 100644 --- a/src/sdkClient/client.ts +++ b/src/sdkClient/client.ts @@ -6,7 +6,7 @@ import { validateTrafficTypeExistence } from '../utils/inputValidation/trafficTy import { SDK_NOT_READY } from '../utils/labels'; import { CONTROL, TREATMENT, TREATMENTS, TREATMENT_WITH_CONFIG, TREATMENTS_WITH_CONFIG, TRACK, TREATMENTS_WITH_CONFIG_BY_FLAGSETS, TREATMENTS_BY_FLAGSETS, TREATMENTS_BY_FLAGSET, TREATMENTS_WITH_CONFIG_BY_FLAGSET, GET_TREATMENTS_WITH_CONFIG, GET_TREATMENTS_BY_FLAG_SETS, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, GET_TREATMENTS_BY_FLAG_SET, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SET, GET_TREATMENT_WITH_CONFIG, GET_TREATMENT, GET_TREATMENTS, TRACK_FN_LABEL } from '../utils/constants'; import { IEvaluationResult } from '../evaluator/types'; -import { SplitIO, ImpressionDTO } from '../types'; +import SplitIO from '../../types/splitio'; import { IMPRESSION, IMPRESSION_QUEUEING } from '../logger/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; import { isConsumerMode } from '../utils/settingsValidation/mode'; @@ -25,7 +25,7 @@ function treatmentsNotReady(featureFlagNames: string[]) { /** * Creator of base client with getTreatments and track methods. */ -export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | SplitIO.IAsyncClient { +export function clientFactory(params: ISdkFactoryContext): SplitIO.INodeClient | SplitIO.INodeAsyncClient { const { sdkReadinessManager: { readinessManager }, storage, settings, impressionsTracker, eventTracker, telemetryTracker } = params; const { log, mode } = settings; const isAsync = isConsumerMode(mode); @@ -34,7 +34,7 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl const stopTelemetryTracker = telemetryTracker.trackEval(withConfig ? TREATMENT_WITH_CONFIG : TREATMENT); const wrapUp = (evaluationResult: IEvaluationResult) => { - const queue: ImpressionDTO[] = []; + const queue: SplitIO.ImpressionDTO[] = []; const treatment = processEvaluation(evaluationResult, featureFlagName, key, attributes, withConfig, methodName, queue); impressionsTracker.track(queue, attributes); @@ -59,7 +59,7 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl const stopTelemetryTracker = telemetryTracker.trackEval(withConfig ? 
TREATMENTS_WITH_CONFIG : TREATMENTS); const wrapUp = (evaluationResults: Record) => { - const queue: ImpressionDTO[] = []; + const queue: SplitIO.ImpressionDTO[] = []; const treatments: Record = {}; Object.keys(evaluationResults).forEach(featureFlagName => { treatments[featureFlagName] = processEvaluation(evaluationResults[featureFlagName], featureFlagName, key, attributes, withConfig, methodName, queue); @@ -87,7 +87,7 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl const stopTelemetryTracker = telemetryTracker.trackEval(method); const wrapUp = (evaluationResults: Record) => { - const queue: ImpressionDTO[] = []; + const queue: SplitIO.ImpressionDTO[] = []; const treatments: Record = {}; const evaluations = evaluationResults; Object.keys(evaluations).forEach(featureFlagName => { @@ -128,7 +128,7 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl attributes: SplitIO.Attributes | undefined, withConfig: boolean, invokingMethodName: string, - queue: ImpressionDTO[] + queue: SplitIO.ImpressionDTO[] ): SplitIO.Treatment | SplitIO.TreatmentWithConfig { const matchingKey = getMatching(key); const bucketingKey = getBucketing(key); @@ -199,6 +199,5 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl getTreatmentsByFlagSet, getTreatmentsWithConfigByFlagSet, track, - isClientSide: false - } as SplitIO.IClient | SplitIO.IAsyncClient; + } as SplitIO.INodeClient | SplitIO.INodeAsyncClient; } diff --git a/src/sdkClient/clientAttributesDecoration.ts b/src/sdkClient/clientAttributesDecoration.ts index 57413ad9..4f4d9070 100644 --- a/src/sdkClient/clientAttributesDecoration.ts +++ b/src/sdkClient/clientAttributesDecoration.ts @@ -1,13 +1,13 @@ import { AttributesCacheInMemory } from '../storages/inMemory/AttributesCacheInMemory'; import { validateAttributesDeep } from '../utils/inputValidation/attributes'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { ILogger } from '../logger/types'; import { objectAssign } from '../utils/lang/objectAssign'; /** * Add in memory attributes storage methods and combine them with any attribute received from the getTreatment/s call */ -export function clientAttributesDecoration(log: ILogger, client: TClient) { +export function clientAttributesDecoration(log: ILogger, client: TClient) { const attributeStorage = new AttributesCacheInMemory(); diff --git a/src/sdkClient/clientCS.ts b/src/sdkClient/clientCS.ts index a4a63edb..75401560 100644 --- a/src/sdkClient/clientCS.ts +++ b/src/sdkClient/clientCS.ts @@ -1,6 +1,6 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { ILogger } from '../logger/types'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { clientAttributesDecoration } from './clientAttributesDecoration'; @@ -10,7 +10,7 @@ import { clientAttributesDecoration } from './clientAttributesDecoration'; * @param client sync client instance * @param key validated split key */ -export function clientCSDecorator(log: ILogger, client: SplitIO.IClient, key: SplitIO.SplitKey): SplitIO.ICsClient { +export function clientCSDecorator(log: ILogger, client: SplitIO.INodeClient, key: SplitIO.SplitKey): SplitIO.IClient { let clientCS = clientAttributesDecoration(log, client); @@ -30,5 +30,5 @@ export function clientCSDecorator(log: ILogger, client: SplitIO.IClient, key: Sp // Not part of the public API. 
These properties are used to support other modules (e.g., Split Suite) isClientSide: true, key - }) as SplitIO.ICsClient; + }) as SplitIO.IClient; } diff --git a/src/sdkClient/clientInputValidation.ts b/src/sdkClient/clientInputValidation.ts index 13c2fbc6..8b93cc96 100644 --- a/src/sdkClient/clientInputValidation.ts +++ b/src/sdkClient/clientInputValidation.ts @@ -15,7 +15,8 @@ import { startsWith } from '../utils/lang'; import { CONTROL, CONTROL_WITH_CONFIG, GET_TREATMENT, GET_TREATMENTS, GET_TREATMENTS_BY_FLAG_SET, GET_TREATMENTS_BY_FLAG_SETS, GET_TREATMENTS_WITH_CONFIG, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SET, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, GET_TREATMENT_WITH_CONFIG, TRACK_FN_LABEL } from '../utils/constants'; import { IReadinessManager } from '../readiness/types'; import { MaybeThenable } from '../dtos/types'; -import { ISettings, SplitIO } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { isConsumerMode } from '../utils/settingsValidation/mode'; import { validateFlagSets } from '../utils/settingsValidation/splitFilters'; @@ -23,7 +24,7 @@ import { validateFlagSets } from '../utils/settingsValidation/splitFilters'; * Decorator that validates the input before actually executing the client methods. * We should "guard" the client here, while not polluting the "real" implementation of those methods. */ -export function clientInputValidationDecorator(settings: ISettings, client: TClient, readinessManager: IReadinessManager): TClient { +export function clientInputValidationDecorator(settings: ISettings, client: TClient, readinessManager: IReadinessManager): TClient { const { log, mode } = settings; const isAsync = isConsumerMode(mode); diff --git a/src/sdkClient/identity.ts b/src/sdkClient/identity.ts index 08822263..ef3fd377 100644 --- a/src/sdkClient/identity.ts +++ b/src/sdkClient/identity.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; export function buildInstanceId(key: SplitIO.SplitKey, trafficType?: string) { // @ts-ignore return `${key.matchingKey ? key.matchingKey : key}-${key.bucketingKey ? key.bucketingKey : key}-${trafficType ? 
trafficType : ''}`; diff --git a/src/sdkClient/sdkClient.ts b/src/sdkClient/sdkClient.ts index fdfa135b..3d7aef28 100644 --- a/src/sdkClient/sdkClient.ts +++ b/src/sdkClient/sdkClient.ts @@ -1,5 +1,5 @@ import { objectAssign } from '../utils/lang/objectAssign'; -import { IStatusInterface, SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { releaseApiKey } from '../utils/inputValidation/apiKey'; import { clientFactory } from './client'; import { clientInputValidationDecorator } from './clientInputValidation'; @@ -10,7 +10,7 @@ const COOLDOWN_TIME_IN_MILLIS = 1000; /** * Creates an Sdk client, i.e., a base client with status and destroy interface */ -export function sdkClientFactory(params: ISdkFactoryContext, isSharedClient?: boolean): SplitIO.IClient | SplitIO.IAsyncClient { +export function sdkClientFactory(params: ISdkFactoryContext, isSharedClient?: boolean): SplitIO.INodeClient | SplitIO.INodeAsyncClient { const { sdkReadinessManager, syncManager, storage, signalListener, settings, telemetryTracker, uniqueKeysTracker } = params; let lastActionTime = 0; @@ -37,7 +37,7 @@ export function sdkClientFactory(params: ISdkFactoryContext, isSharedClient?: bo return objectAssign( // Proto-linkage of the readiness Event Emitter - Object.create(sdkReadinessManager.sdkStatus) as IStatusInterface, + Object.create(sdkReadinessManager.sdkStatus) as SplitIO.IStatusInterface, // Client API (getTreatment* & track methods) clientInputValidationDecorator( diff --git a/src/sdkClient/sdkClientMethod.ts b/src/sdkClient/sdkClientMethod.ts index 3860c5a7..c24307e2 100644 --- a/src/sdkClient/sdkClientMethod.ts +++ b/src/sdkClient/sdkClientMethod.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { sdkClientFactory } from './sdkClient'; import { RETRIEVE_CLIENT_DEFAULT } from '../logger/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; @@ -6,7 +6,7 @@ import { ISdkFactoryContext } from '../sdkFactory/types'; /** * Factory of client method for server-side SDKs */ -export function sdkClientMethodFactory(params: ISdkFactoryContext): () => SplitIO.IClient | SplitIO.IAsyncClient { +export function sdkClientMethodFactory(params: ISdkFactoryContext): () => SplitIO.INodeClient | SplitIO.INodeAsyncClient { const log = params.settings.log; const clientInstance = sdkClientFactory(params); diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index b5cc0e58..a5d66656 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -1,5 +1,5 @@ import { clientCSDecorator } from './clientCS'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { validateKey } from '../utils/inputValidation/key'; import { getMatching, keyParser } from '../utils/key'; import { sdkClientFactory } from './sdkClient'; @@ -14,12 +14,12 @@ import { buildInstanceId } from './identity'; * Factory of client method for the client-side API variant where TT is ignored. * Therefore, clients don't have a bound TT for the track method. 
*/ -export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.ICsClient { +export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.IClient { const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log } } = params; const mainClientInstance = clientCSDecorator( log, - sdkClientFactory(params) as SplitIO.IClient, + sdkClientFactory(params) as SplitIO.INodeClient, key ); @@ -71,7 +71,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sdkReadinessManager: sharedSdkReadiness, storage: sharedStorage || storage, syncManager: sharedSyncManager, - }), true) as SplitIO.IClient, + }), true) as SplitIO.INodeClient, validKey ); @@ -80,6 +80,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl log.debug(RETRIEVE_CLIENT_EXISTING); } - return clients[instanceId] as SplitIO.ICsClient; + return clients[instanceId] as SplitIO.IClient; }; } diff --git a/src/sdkFactory/__tests__/index.spec.ts b/src/sdkFactory/__tests__/index.spec.ts index 742abaff..b4ef3ec1 100644 --- a/src/sdkFactory/__tests__/index.spec.ts +++ b/src/sdkFactory/__tests__/index.spec.ts @@ -1,7 +1,7 @@ import { ISdkFactoryParams } from '../types'; import { sdkFactory } from '../index'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { EventEmitter } from '../../utils/MinEvents'; /** Mocks */ @@ -59,7 +59,7 @@ const fullParamsForSyncSDK = { /** End Mocks */ -function assertSdkApi(sdk: SplitIO.IAsyncSDK | SplitIO.ISDK | SplitIO.ICsSDK, params: any) { +function assertSdkApi(sdk: SplitIO.INodeAsyncSDK | SplitIO.INodeSDK | SplitIO.IAsyncSDK | SplitIO.ISDK, params: any) { expect(sdk.Logger).toBe(loggerApiMock); expect(sdk.settings).toBe(params.settings); expect(sdk.client).toBe(params.sdkClientMethodFactory.mock.results[0].value); diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 4bfe62e6..4363f0e2 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -3,7 +3,7 @@ import { sdkReadinessManagerFactory } from '../readiness/sdkReadinessManager'; import { impressionsTrackerFactory } from '../trackers/impressionsTracker'; import { eventTrackerFactory } from '../trackers/eventTracker'; import { telemetryTrackerFactory } from '../trackers/telemetryTracker'; -import { IBasicClient, SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { validateAndTrackApiKey } from '../utils/inputValidation/apiKey'; import { createLoggerAPI } from '../logger/sdkLogger'; import { NEW_FACTORY, RETRIEVE_MANAGER } from '../logger/constants'; @@ -18,7 +18,7 @@ import { NONE, OPTIMIZED } from '../utils/constants'; /** * Modular SDK factory */ -export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO.ISDK | SplitIO.IAsyncSDK { +export function sdkFactory(params: ISdkFactoryParams): SplitIO.INodeSDK | SplitIO.INodeAsyncSDK | SplitIO.ISDK | SplitIO.IAsyncSDK { const { settings, platform, storageFactory, splitApiFactory, extraProps, syncManagerFactory, SignalListener, impressionsObserverFactory, @@ -54,7 +54,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
}, }); // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` - const clients: Record = {}; + const clients: Record = {}; const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now); const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker }); diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts index 37a5ad46..2fa1cee0 100644 --- a/src/sdkFactory/types.ts +++ b/src/sdkFactory/types.ts @@ -8,7 +8,8 @@ import { IStorageAsync, IStorageSync, IStorageFactoryParams } from '../storages/ import { ISyncManager } from '../sync/types'; import { IImpressionObserver } from '../trackers/impressionObserver/types'; import { IImpressionsTracker, IEventTracker, ITelemetryTracker, IFilterAdapter, IUniqueKeysTracker } from '../trackers/types'; -import { SplitIO, ISettings, IEventEmitter, IBasicClient } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; /** * Environment related dependencies. @@ -29,7 +30,7 @@ export interface IPlatform { /** * EventEmitter constructor, like NodeJS.EventEmitter or a polyfill. */ - EventEmitter: new () => IEventEmitter, + EventEmitter: new () => SplitIO.IEventEmitter, /** * Function used to track latencies for telemetry. */ @@ -49,7 +50,7 @@ export interface ISdkFactoryContext { signalListener?: ISignalListener splitApi?: ISplitApi syncManager?: ISyncManager, - clients: Record, + clients: Record, } export interface ISdkFactoryContextSync extends ISdkFactoryContext { @@ -78,7 +79,7 @@ export interface ISdkFactoryParams { platform: IPlatform, // Storage factory. The result storage type implies the type of the SDK: - // sync SDK (`ISDK` or `ICsSDK`) with `IStorageSync`, and async SDK (`IAsyncSDK`) with `IStorageAsync` + // sync SDK (`ISDK` and `INodeSDK`) with `IStorageSync`, and async SDK (`IAsyncSDK` and `INodeAsyncSDK`) with `IStorageAsync` storageFactory: (params: IStorageFactoryParams) => IStorageSync | IStorageAsync, // Factory of Split Api (HTTP Client Service). @@ -94,8 +95,8 @@ export interface ISdkFactoryParams { sdkManagerFactory: typeof sdkManagerFactory, // Sdk client method factory (ISDK::client method). - // It Allows to distinguish SDK clients with the client-side API (`ICsSDK`) or server-side API (`ISDK` or `IAsyncSDK`). - sdkClientMethodFactory: (params: ISdkFactoryContext) => ({ (): SplitIO.ICsClient; (key: SplitIO.SplitKey): SplitIO.ICsClient; } | (() => SplitIO.IClient) | (() => SplitIO.IAsyncClient)) + // It Allows to distinguish SDK clients with the client-side API (`ISDK` and `IAsyncSDK`) or server-side API (`INodeSDK` and `INodeAsyncSDK`). + sdkClientMethodFactory: (params: ISdkFactoryContext) => ({ (): SplitIO.IClient; (key: SplitIO.SplitKey): SplitIO.IClient; } | (() => SplitIO.INodeClient) | (() => SplitIO.INodeAsyncClient)) // Impression observer factory. 
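The IPlatform field above now expects a constructor of SplitIO.IEventEmitter, the reduced emitter shape (addListener/on/once/removeListener/off/removeAllListeners/emit) that this series moves out of src/types.ts. A rough standalone sketch of a polyfill satisfying that shape, assuming only the method list shown in the relocated declaration (this is not the SDK's utils/MinEvents implementation):

type Listener = (...args: any[]) => void;

class MinimalEventEmitter {
  private listeners: Record<string, Listener[]> = {};

  addListener(event: string, listener: Listener): this {
    (this.listeners[event] = this.listeners[event] || []).push(listener);
    return this;
  }
  on(event: string, listener: Listener): this {
    return this.addListener(event, listener);
  }
  once(event: string, listener: Listener): this {
    const wrapper: Listener = (...args) => {
      this.removeListener(event, wrapper);
      listener(...args);
    };
    return this.addListener(event, wrapper);
  }
  removeListener(event: string, listener: Listener): this {
    this.listeners[event] = (this.listeners[event] || []).filter((l) => l !== listener);
    return this;
  }
  off(event: string, listener: Listener): this {
    return this.removeListener(event, listener);
  }
  removeAllListeners(event?: string): this {
    if (event) delete this.listeners[event]; else this.listeners = {};
    return this;
  }
  emit(event: string, ...args: any[]): boolean {
    const current = this.listeners[event] || [];
    current.slice().forEach((l) => l(...args));
    return current.length > 0;
  }
}

// Something shaped like IPlatform can receive the constructor itself.
const platformLike = { EventEmitter: MinimalEventEmitter };
new platformLike.EventEmitter().on('state::update', () => console.log('updated')).emit('state::update');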
impressionsObserverFactory: () => IImpressionObserver diff --git a/src/sdkManager/__tests__/index.asyncCache.spec.ts b/src/sdkManager/__tests__/index.asyncCache.spec.ts index f812e959..a0277a75 100644 --- a/src/sdkManager/__tests__/index.asyncCache.spec.ts +++ b/src/sdkManager/__tests__/index.asyncCache.spec.ts @@ -9,7 +9,7 @@ import { ISdkReadinessManager } from '../../readiness/types'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { metadata } from '../../storages/__tests__/KeyBuilder.spec'; import { RedisAdapter } from '../../storages/inRedis/RedisAdapter'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; // @ts-expect-error const sdkReadinessManagerMock = { diff --git a/src/sdkManager/index.ts b/src/sdkManager/index.ts index 47aea3fa..1246f16f 100644 --- a/src/sdkManager/index.ts +++ b/src/sdkManager/index.ts @@ -5,7 +5,8 @@ import { validateSplit, validateSplitExistence, validateIfNotDestroyed, validate import { ISplitsCacheAsync, ISplitsCacheSync } from '../storages/types'; import { ISdkReadinessManager } from '../readiness/types'; import { ISplit } from '../dtos/types'; -import { ISettings, SplitIO } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { isConsumerMode } from '../utils/settingsValidation/mode'; import { SPLIT_FN_LABEL, SPLITS_FN_LABEL, NAMES_FN_LABEL } from '../utils/constants'; diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 24898d68..5e70db86 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../types'; +import { PreloadedData } from '../types'; import { DEFAULT_CACHE_EXPIRATION_IN_MILLIS } from '../utils/constants/browser'; import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; @@ -9,7 +9,7 @@ import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; * and extended with a `mySegmentsData` property. 
* @returns function to preload the storage */ -export function dataLoaderFactory(preloadedData: SplitIO.PreloadedData): DataLoader { +export function dataLoaderFactory(preloadedData: PreloadedData): DataLoader { /** * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function diff --git a/src/storages/inMemory/AttributesCacheInMemory.ts b/src/storages/inMemory/AttributesCacheInMemory.ts index da7445a1..80870e55 100644 --- a/src/storages/inMemory/AttributesCacheInMemory.ts +++ b/src/storages/inMemory/AttributesCacheInMemory.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { objectAssign } from '../../utils/lang/objectAssign'; export class AttributesCacheInMemory { diff --git a/src/storages/inMemory/EventsCacheInMemory.ts b/src/storages/inMemory/EventsCacheInMemory.ts index 64525cdf..92152aa0 100644 --- a/src/storages/inMemory/EventsCacheInMemory.ts +++ b/src/storages/inMemory/EventsCacheInMemory.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IEventsCacheSync } from '../types'; const MAX_QUEUE_BYTE_SIZE = 5 * 1024 * 1024; // 5M diff --git a/src/storages/inMemory/ImpressionsCacheInMemory.ts b/src/storages/inMemory/ImpressionsCacheInMemory.ts index a3d46634..9b959c31 100644 --- a/src/storages/inMemory/ImpressionsCacheInMemory.ts +++ b/src/storages/inMemory/ImpressionsCacheInMemory.ts @@ -1,11 +1,11 @@ import { IImpressionsCacheSync } from '../types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; export class ImpressionsCacheInMemory implements IImpressionsCacheSync { private onFullQueue?: () => void; private readonly maxQueue: number; - private queue: ImpressionDTO[]; + private queue: SplitIO.ImpressionDTO[]; /** * @@ -24,7 +24,7 @@ export class ImpressionsCacheInMemory implements IImpressionsCacheSync { /** * Store impressions in sequential order */ - track(data: ImpressionDTO[]) { + track(data: SplitIO.ImpressionDTO[]) { this.queue.push(...data); // Check if the cache queue is full and we need to flush it. @@ -43,7 +43,7 @@ export class ImpressionsCacheInMemory implements IImpressionsCacheSync { /** * Pop the collected data, used as payload for posting. */ - pop(toMerge?: ImpressionDTO[]) { + pop(toMerge?: SplitIO.ImpressionDTO[]) { const data = this.queue; this.clear(); return toMerge ? 
toMerge.concat(data) : data; diff --git a/src/storages/inRedis/EventsCacheInRedis.ts b/src/storages/inRedis/EventsCacheInRedis.ts index ecd14a32..c9b84459 100644 --- a/src/storages/inRedis/EventsCacheInRedis.ts +++ b/src/storages/inRedis/EventsCacheInRedis.ts @@ -1,6 +1,6 @@ import { IEventsCacheAsync } from '../types'; import { IMetadata } from '../../dtos/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; import { StoredEventWithMetadata } from '../../sync/submitters/types'; diff --git a/src/storages/inRedis/ImpressionsCacheInRedis.ts b/src/storages/inRedis/ImpressionsCacheInRedis.ts index 4ac0acaa..15d02508 100644 --- a/src/storages/inRedis/ImpressionsCacheInRedis.ts +++ b/src/storages/inRedis/ImpressionsCacheInRedis.ts @@ -1,6 +1,6 @@ import { IImpressionsCacheAsync } from '../types'; import { IMetadata } from '../../dtos/types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { StoredImpressionWithMetadata } from '../../sync/submitters/types'; import { ILogger } from '../../logger/types'; import { impressionsToJSON } from '../utils'; @@ -22,7 +22,7 @@ export class ImpressionsCacheInRedis implements IImpressionsCacheAsync { this.metadata = metadata; } - track(impressions: ImpressionDTO[]): Promise { // @ts-ignore + track(impressions: SplitIO.ImpressionDTO[]): Promise { // @ts-ignore return this.redis.rpush( this.key, impressionsToJSON(impressions, this.metadata), diff --git a/src/storages/pluggable/EventsCachePluggable.ts b/src/storages/pluggable/EventsCachePluggable.ts index d30d43b7..1fa99030 100644 --- a/src/storages/pluggable/EventsCachePluggable.ts +++ b/src/storages/pluggable/EventsCachePluggable.ts @@ -1,6 +1,6 @@ import { IPluggableStorageWrapper, IEventsCacheAsync } from '../types'; import { IMetadata } from '../../dtos/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; import { StoredEventWithMetadata } from '../../sync/submitters/types'; diff --git a/src/storages/pluggable/ImpressionsCachePluggable.ts b/src/storages/pluggable/ImpressionsCachePluggable.ts index dede350d..e7dadab6 100644 --- a/src/storages/pluggable/ImpressionsCachePluggable.ts +++ b/src/storages/pluggable/ImpressionsCachePluggable.ts @@ -1,6 +1,6 @@ import { IPluggableStorageWrapper, IImpressionsCacheAsync } from '../types'; import { IMetadata } from '../../dtos/types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { StoredImpressionWithMetadata } from '../../sync/submitters/types'; import { ILogger } from '../../logger/types'; import { impressionsToJSON } from '../utils'; @@ -25,7 +25,7 @@ export class ImpressionsCachePluggable implements IImpressionsCacheAsync { * @returns A promise that is resolved if the push operation succeeded * or rejected if the wrapper operation fails. 
*/ - track(impressions: ImpressionDTO[]): Promise { + track(impressions: SplitIO.ImpressionDTO[]): Promise { return this.wrapper.pushItems( this.key, impressionsToJSON(impressions, this.metadata) diff --git a/src/storages/types.ts b/src/storages/types.ts index ea15c293..8e1516e9 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,7 +1,8 @@ +import SplitIO from '../../types/splitio'; import { MaybeThenable, ISplit, IMySegmentsResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; -import { SplitIO, ImpressionDTO, ISettings } from '../types'; +import { ISettings } from '../types'; /** * Interface of a pluggable storage wrapper. @@ -283,7 +284,7 @@ export interface ISegmentsCacheAsync extends ISegmentsCacheBase { export interface IImpressionsCacheBase { // Used by impressions tracker, in DEBUG and OPTIMIZED impression modes, to push impressions into the storage. - track(data: ImpressionDTO[]): MaybeThenable + track(data: SplitIO.ImpressionDTO[]): MaybeThenable } export interface IEventsCacheBase { @@ -314,8 +315,8 @@ export interface IRecorderCacheSync { pop(toMerge?: T): T } -export interface IImpressionsCacheSync extends IImpressionsCacheBase, IRecorderCacheSync { - track(data: ImpressionDTO[]): void +export interface IImpressionsCacheSync extends IImpressionsCacheBase, IRecorderCacheSync { + track(data: SplitIO.ImpressionDTO[]): void /* Registers callback for full queue */ setOnFullQueueCb(cb: () => void): void } @@ -348,7 +349,7 @@ export interface IRecorderCacheAsync { export interface IImpressionsCacheAsync extends IImpressionsCacheBase, IRecorderCacheAsync { // Consumer API method, used by impressions tracker (in standalone and consumer modes) to push data into. // The result promise can reject. 
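IImpressionsCacheBase.track above keeps returning a MaybeThenable result: the synchronous caches return plain values while the Redis and pluggable ones return promises. A small sketch of how a caller can branch on that union; MaybeThenable is re-declared locally so the snippet stands alone, and isThenable is an illustrative stand-in rather than the SDK's thenable util:

type MaybeThenable<T> = T | Promise<T>;

// Detects promise-like results without assuming a specific Promise implementation.
function isThenable<T>(value: MaybeThenable<T>): value is Promise<T> {
  return value != null && typeof (value as Promise<T>).then === 'function';
}

// Works against both sync and async caches without awaiting unnecessarily.
function afterTrack(result: MaybeThenable<void>, onDone: () => void, onError: (e: unknown) => void): void {
  if (isThenable(result)) {
    result.then(onDone).catch(onError);
  } else {
    onDone();
  }
}

afterTrack(undefined, () => console.log('queued'), (e) => console.error(e));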
- track(data: ImpressionDTO[]): Promise + track(data: SplitIO.ImpressionDTO[]): Promise } export interface IEventsCacheAsync extends IEventsCacheBase, IRecorderCacheAsync { @@ -499,14 +500,13 @@ export interface IStorageFactoryParams { onReadyCb: (error?: any) => void, } -export type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; -export type IStorageSyncFactory = { - readonly type: StorageType, +export type IStorageSyncFactory = SplitIO.StorageSyncFactory & { + readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageSync } -export type IStorageAsyncFactory = { - type: StorageType, +export type IStorageAsyncFactory = SplitIO.StorageAsyncFactory & { + readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageAsync } diff --git a/src/storages/utils.ts b/src/storages/utils.ts index 2bf236e3..2963bbc5 100644 --- a/src/storages/utils.ts +++ b/src/storages/utils.ts @@ -2,7 +2,8 @@ import { IMetadata } from '../dtos/types'; import { Method, StoredImpressionWithMetadata } from '../sync/submitters/types'; -import { ImpressionDTO, ISettings } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { UNKNOWN } from '../utils/constants'; import { MAX_LATENCY_BUCKET_COUNT } from './inMemory/TelemetryCacheInMemory'; import { METHOD_NAMES } from './KeyBuilderSS'; @@ -16,7 +17,7 @@ export function metadataBuilder(settings: Pick } // Converts impressions to be stored in Redis or pluggable storage. -export function impressionsToJSON(impressions: ImpressionDTO[], metadata: IMetadata): string[] { +export function impressionsToJSON(impressions: SplitIO.ImpressionDTO[], metadata: IMetadata): string[] { return impressions.map(impression => { const impressionWithMetadata: StoredImpressionWithMetadata = { m: metadata, diff --git a/src/sync/offline/splitsParser/__tests__/splitsParserFromSettings.spec.ts b/src/sync/offline/splitsParser/__tests__/splitsParserFromSettings.spec.ts index 08e42996..20e1a17f 100644 --- a/src/sync/offline/splitsParser/__tests__/splitsParserFromSettings.spec.ts +++ b/src/sync/offline/splitsParser/__tests__/splitsParserFromSettings.spec.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../../../types'; +import SplitIO from '../../../../../types/splitio'; import { splitsParserFromSettingsFactory } from '../splitsParserFromSettings'; const FEATURE_ON = { conditions: [{ conditionType: 'ROLLOUT', label: 'default rule', matcherGroup: { combiner: 'AND', matchers: [{ keySelector: null, matcherType: 'ALL_KEYS', negate: false }] }, partitions: [{ size: 100, treatment: 'on' }] }], configurations: {}, trafficTypeName: 'localhost' }; diff --git a/src/sync/offline/splitsParser/splitsParserFromSettings.ts b/src/sync/offline/splitsParser/splitsParserFromSettings.ts index e94d3b07..d0cafc9f 100644 --- a/src/sync/offline/splitsParser/splitsParserFromSettings.ts +++ b/src/sync/offline/splitsParser/splitsParserFromSettings.ts @@ -1,5 +1,5 @@ import { ISplitPartial } from '../../../dtos/types'; -import { ISettings, SplitIO } from '../../../types'; +import SplitIO from '../../../../types/splitio'; import { isObject, forOwn, merge } from '../../../utils/lang'; import { parseCondition } from './parseCondition'; @@ -41,7 +41,7 @@ export function splitsParserFromSettingsFactory() { * * @param settings validated object with mocked features mapping. 
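The storages/types.ts hunk above derives IStorageSyncFactory and IStorageAsyncFactory by intersecting the public SplitIO factory types with a call signature plus a readonly type tag. A sketch of that "callable object carrying a type property" pattern; the toy storage shape is invented for illustration, only the tag union mirrors the StorageType values being relocated:

type StorageTypeTag = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE';

interface ToyStorage { splits: Map<string, string>; destroy(): void; }

// A factory is both callable and tagged, like the intersected factory types above.
type ToyStorageFactory = { readonly type: StorageTypeTag } & ((params: { prefix: string }) => ToyStorage);

const inMemoryFactory: ToyStorageFactory = Object.assign(
  (_params: { prefix: string }): ToyStorage => {
    const splits = new Map<string, string>();
    return { splits, destroy() { splits.clear(); } };
  },
  { type: 'MEMORY' as const }
);

const storage = inMemoryFactory({ prefix: 'SPLITIO' });
console.log(inMemoryFactory.type, storage.splits.size); // 'MEMORY' 0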
*/ - return function splitsParserFromSettings(settings: Pick): false | Record { + return function splitsParserFromSettings(settings: Pick): false | Record { const features = settings.features as SplitIO.MockedFeaturesMap || {}; if (!mockUpdated(features)) return false; diff --git a/src/sync/streaming/types.ts b/src/sync/streaming/types.ts index 0684c099..ec80781e 100644 --- a/src/sync/streaming/types.ts +++ b/src/sync/streaming/types.ts @@ -1,7 +1,7 @@ import { IMembershipMSUpdateData, IMembershipLSUpdateData, ISegmentUpdateData, ISplitUpdateData, ISplitKillData, INotificationData } from './SSEHandler/types'; import { ITask } from '../types'; import { IMySegmentsSyncTask } from '../polling/types'; -import { IEventEmitter } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ControlType } from './constants'; // Internal SDK events, subscribed by SyncManager and PushManager @@ -34,7 +34,7 @@ type IParsedData = * EventEmitter used as Feedback Loop between the SyncManager and PushManager, * where the latter pushes messages and the former consumes it */ -export interface IPushEventEmitter extends IEventEmitter { +export interface IPushEventEmitter extends SplitIO.IEventEmitter { once(event: T, listener: (parsedData: IParsedData) => void): this; on(event: T, listener: (parsedData: IParsedData) => void): this; emit(event: T, parsedData?: IParsedData): boolean; diff --git a/src/sync/submitters/impressionsSubmitter.ts b/src/sync/submitters/impressionsSubmitter.ts index e54b5f1f..193e2703 100644 --- a/src/sync/submitters/impressionsSubmitter.ts +++ b/src/sync/submitters/impressionsSubmitter.ts @@ -1,5 +1,5 @@ import { groupBy, forOwn } from '../../utils/lang'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { submitterFactory } from './submitter'; import { ImpressionsPayload } from './types'; import { SUBMITTERS_PUSH_FULL_QUEUE } from '../../logger/constants'; @@ -10,7 +10,7 @@ const DATA_NAME = 'impressions'; /** * Converts `impressions` data from cache into request payload. 
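The impressions submitter that follows groups the popped impressions by feature name via groupBy(data, 'feature') before shaping the request payload. A self-contained sketch of just that grouping step; the field names actually used in Split's request payload are not reproduced here:

interface ImpressionLite { feature: string; keyName: string; treatment: string; time: number; }

// Generic grouping helper, comparable in spirit to the SDK's groupBy util.
function groupBy<T extends Record<string, any>>(items: T[], prop: keyof T): Record<string, T[]> {
  return items.reduce<Record<string, T[]>>((acc, item) => {
    const key = String(item[prop]);
    (acc[key] = acc[key] || []).push(item);
    return acc;
  }, {});
}

const popped: ImpressionLite[] = [
  { feature: 'onboarding_banner', keyName: 'user-1', treatment: 'on', time: Date.now() },
  { feature: 'onboarding_banner', keyName: 'user-2', treatment: 'off', time: Date.now() },
  { feature: 'new_checkout', keyName: 'user-1', treatment: 'on', time: Date.now() },
];

// One entry per feature flag, each holding the impressions recorded for it.
const grouped = groupBy(popped, 'feature');
console.log(Object.keys(grouped)); // ['onboarding_banner', 'new_checkout']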
*/ -export function fromImpressionsCollector(sendLabels: boolean, data: ImpressionDTO[]): ImpressionsPayload { +export function fromImpressionsCollector(sendLabels: boolean, data: SplitIO.ImpressionDTO[]): ImpressionsPayload { let groupedByFeature = groupBy(data, 'feature'); let dto: ImpressionsPayload = []; diff --git a/src/sync/submitters/telemetrySubmitter.ts b/src/sync/submitters/telemetrySubmitter.ts index a2289e08..7a2e2ee7 100644 --- a/src/sync/submitters/telemetrySubmitter.ts +++ b/src/sync/submitters/telemetrySubmitter.ts @@ -3,7 +3,8 @@ import { submitterFactory, firstPushWindowDecorator } from './submitter'; import { TelemetryConfigStatsPayload, TelemetryConfigStats } from './types'; import { CONSUMER_MODE, CONSUMER_ENUM, STANDALONE_MODE, CONSUMER_PARTIAL_MODE, STANDALONE_ENUM, CONSUMER_PARTIAL_ENUM, OPTIMIZED, DEBUG, NONE, DEBUG_ENUM, OPTIMIZED_ENUM, NONE_ENUM, CONSENT_GRANTED, CONSENT_DECLINED, CONSENT_UNKNOWN } from '../../utils/constants'; import { SDK_READY, SDK_READY_FROM_CACHE } from '../../readiness/constants'; -import { ConsentStatus, ISettings, SDKMode, SplitIO } from '../../types'; +import { ISettings } from '../../types'; +import SplitIO from '../../../types/splitio'; import { base } from '../../utils/settingsValidation'; import { usedKeysMap } from '../../utils/inputValidation/apiKey'; import { timer } from '../../utils/timeTracker/timer'; @@ -27,7 +28,7 @@ const USER_CONSENT_MAP = { [CONSENT_UNKNOWN]: 1, [CONSENT_GRANTED]: 2, [CONSENT_DECLINED]: 3 -} as Record; +} as Record; function getActiveFactories() { return Object.keys(usedKeysMap).length; @@ -51,7 +52,7 @@ function getTelemetryFlagSetsStats(splitFiltersValidation: ISplitFiltersValidati return { flagSetsTotal, flagSetsIgnored }; } -export function getTelemetryConfigStats(mode: SDKMode, storageType: string): TelemetryConfigStats { +export function getTelemetryConfigStats(mode: SplitIO.SDKMode, storageType: string): TelemetryConfigStats { return { oM: OPERATION_MODE_MAP[mode], // @ts-ignore lower case of storage type st: storageType.toLowerCase(), diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index d1629c34..f3b93c4d 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -1,6 +1,6 @@ /* eslint-disable no-use-before-define */ import { IMetadata } from '../../dtos/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ISyncTask } from '../types'; export type ImpressionsPayload = { diff --git a/src/trackers/__tests__/eventTracker.spec.ts b/src/trackers/__tests__/eventTracker.spec.ts index ec0e3f17..95ce3e33 100644 --- a/src/trackers/__tests__/eventTracker.spec.ts +++ b/src/trackers/__tests__/eventTracker.spec.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { QUEUED } from '../../utils/constants'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; import { eventTrackerFactory } from '../eventTracker'; diff --git a/src/trackers/__tests__/impressionsTracker.spec.ts b/src/trackers/__tests__/impressionsTracker.spec.ts index 06f14fc2..08ec9f71 100644 --- a/src/trackers/__tests__/impressionsTracker.spec.ts +++ b/src/trackers/__tests__/impressionsTracker.spec.ts @@ -2,7 +2,7 @@ import { impressionsTrackerFactory } from '../impressionsTracker'; import { ImpressionCountsCacheInMemory } from '../../storages/inMemory/ImpressionCountsCacheInMemory'; import { impressionObserverSSFactory } from 
'../impressionObserver/impressionObserverSS'; import { impressionObserverCSFactory } from '../impressionObserver/impressionObserverCS'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; import { strategyDebugFactory } from '../strategy/strategyDebug'; import { strategyOptimizedFactory } from '../strategy/strategyOptimized'; @@ -60,13 +60,13 @@ describe('Impressions Tracker', () => { const imp1 = { feature: '10', - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const imp2 = { feature: '20', - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const imp3 = { feature: '30', - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; expect(fakeImpressionsCache.track).not.toBeCalled(); // cache method should not be called by just creating a tracker @@ -80,10 +80,10 @@ describe('Impressions Tracker', () => { const fakeImpression = { feature: 'impression' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const fakeImpression2 = { feature: 'impression_2' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const fakeAttributes = { fake: 'attributes' }; @@ -127,7 +127,7 @@ describe('Impressions Tracker', () => { time: 0, bucketingKey: 'impr_bucketing_2', label: 'default rule' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const impression2 = { feature: 'qc_team_2', keyName: 'marcio@split.io', @@ -135,7 +135,7 @@ describe('Impressions Tracker', () => { time: 0, bucketingKey: 'impr_bucketing_2', label: 'default rule' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const impression3 = { feature: 'qc_team', keyName: 'marcio@split.io', @@ -143,7 +143,7 @@ describe('Impressions Tracker', () => { time: 0, bucketingKey: 'impr_bucketing_2', label: 'default rule' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; test('Should track 3 impressions with Previous Time.', () => { impression.time = impression2.time = 123456789; diff --git a/src/trackers/eventTracker.ts b/src/trackers/eventTracker.ts index 8efcf413..473d519c 100644 --- a/src/trackers/eventTracker.ts +++ b/src/trackers/eventTracker.ts @@ -2,7 +2,8 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { thenable } from '../utils/promise/thenable'; import { IEventsCacheBase, ITelemetryCacheAsync, ITelemetryCacheSync } from '../storages/types'; import { IEventsHandler, IEventTracker } from './types'; -import { ISettings, SplitIO } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { EVENTS_TRACKER_SUCCESS, ERROR_EVENTS_TRACKER } from '../logger/constants'; import { CONSENT_DECLINED, DROPPED, QUEUED } from '../utils/constants'; import { isConsumerMode } from '../utils/settingsValidation/mode'; diff --git a/src/trackers/impressionObserver/ImpressionObserver.ts b/src/trackers/impressionObserver/ImpressionObserver.ts index 377a4f08..b9765b47 100644 --- a/src/trackers/impressionObserver/ImpressionObserver.ts +++ b/src/trackers/impressionObserver/ImpressionObserver.ts @@ -1,17 +1,17 @@ -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { LRUCache } from '../../utils/LRUCache'; import { IImpressionObserver } from './types'; export class ImpressionObserver implements IImpressionObserver { private cache: LRUCache; - private hasher: (impression: ImpressionDTO) => K; + private hasher: (impression: SplitIO.ImpressionDTO) => K; - constructor(size: number, hasher: (impression: ImpressionDTO) => K) { + 
constructor(size: number, hasher: (impression: SplitIO.ImpressionDTO) => K) { this.cache = new LRUCache(size); this.hasher = hasher; } - testAndSet(impression: ImpressionDTO) { + testAndSet(impression: SplitIO.ImpressionDTO) { const hash = this.hasher(impression); const previous = this.cache.get(hash); this.cache.set(hash, impression.time); diff --git a/src/trackers/impressionObserver/__tests__/testUtils.ts b/src/trackers/impressionObserver/__tests__/testUtils.ts index a41b9f24..3ede736c 100644 --- a/src/trackers/impressionObserver/__tests__/testUtils.ts +++ b/src/trackers/impressionObserver/__tests__/testUtils.ts @@ -1,6 +1,6 @@ -import { ImpressionDTO } from '../../../types'; +import SplitIO from '../../../../types/splitio'; -export function generateImpressions(count: number): ImpressionDTO[] { +export function generateImpressions(count: number): SplitIO.ImpressionDTO[] { const impressions = []; for (let i = 0; i < count; i++) { impressions.push({ diff --git a/src/trackers/impressionObserver/buildKey.ts b/src/trackers/impressionObserver/buildKey.ts index 8adedb82..8479e4ac 100644 --- a/src/trackers/impressionObserver/buildKey.ts +++ b/src/trackers/impressionObserver/buildKey.ts @@ -1,5 +1,5 @@ -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; -export function buildKey(impression: ImpressionDTO) { +export function buildKey(impression: SplitIO.ImpressionDTO) { return `${impression.keyName}:${impression.feature}:${impression.treatment}:${impression.label}:${impression.changeNumber}`; } diff --git a/src/trackers/impressionObserver/impressionObserverCS.ts b/src/trackers/impressionObserver/impressionObserverCS.ts index 712d8738..de69455c 100644 --- a/src/trackers/impressionObserver/impressionObserverCS.ts +++ b/src/trackers/impressionObserver/impressionObserverCS.ts @@ -1,9 +1,9 @@ import { ImpressionObserver } from './ImpressionObserver'; import { hash } from '../../utils/murmur3/murmur3'; import { buildKey } from './buildKey'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; -export function hashImpression32(impression: ImpressionDTO) { +export function hashImpression32(impression: SplitIO.ImpressionDTO) { return hash(buildKey(impression)); } diff --git a/src/trackers/impressionObserver/impressionObserverSS.ts b/src/trackers/impressionObserver/impressionObserverSS.ts index 7a81279f..23efeec4 100644 --- a/src/trackers/impressionObserver/impressionObserverSS.ts +++ b/src/trackers/impressionObserver/impressionObserverSS.ts @@ -1,9 +1,9 @@ import { ImpressionObserver } from './ImpressionObserver'; import { hash128 } from '../../utils/murmur3/murmur3_128_x86'; import { buildKey } from './buildKey'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; -export function hashImpression128(impression: ImpressionDTO) { +export function hashImpression128(impression: SplitIO.ImpressionDTO) { return hash128(buildKey(impression)); } diff --git a/src/trackers/impressionObserver/types.ts b/src/trackers/impressionObserver/types.ts index 16aeafb3..8c4f2a33 100644 --- a/src/trackers/impressionObserver/types.ts +++ b/src/trackers/impressionObserver/types.ts @@ -1,5 +1,5 @@ -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; export interface IImpressionObserver { - testAndSet(impression: ImpressionDTO): number | undefined + testAndSet(impression: SplitIO.ImpressionDTO): number | undefined } diff --git a/src/trackers/impressionsTracker.ts 
b/src/trackers/impressionsTracker.ts index dcf998fc..ef58908b 100644 --- a/src/trackers/impressionsTracker.ts +++ b/src/trackers/impressionsTracker.ts @@ -2,9 +2,10 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { thenable } from '../utils/promise/thenable'; import { IImpressionsCacheBase, ITelemetryCacheSync, ITelemetryCacheAsync } from '../storages/types'; import { IImpressionsHandler, IImpressionsTracker, IStrategy } from './types'; -import { SplitIO, ImpressionDTO, ISettings } from '../types'; +import { ISettings } from '../types'; import { IMPRESSIONS_TRACKER_SUCCESS, ERROR_IMPRESSIONS_TRACKER, ERROR_IMPRESSIONS_LISTENER } from '../logger/constants'; import { CONSENT_DECLINED, DEDUPED, QUEUED } from '../utils/constants'; +import SplitIO from '../../types/splitio'; /** * Impressions tracker stores impressions in cache and pass them to the listener and integrations manager if provided. @@ -27,7 +28,7 @@ export function impressionsTrackerFactory( const { log, impressionListener, runtime: { ip, hostname }, version } = settings; return { - track(impressions: ImpressionDTO[], attributes?: SplitIO.Attributes) { + track(impressions: SplitIO.ImpressionDTO[], attributes?: SplitIO.Attributes) { if (settings.userConsent === CONSENT_DECLINED) return; const impressionsCount = impressions.length; diff --git a/src/trackers/strategy/__tests__/testUtils.ts b/src/trackers/strategy/__tests__/testUtils.ts index 121ec69c..9c8fcd68 100644 --- a/src/trackers/strategy/__tests__/testUtils.ts +++ b/src/trackers/strategy/__tests__/testUtils.ts @@ -1,4 +1,4 @@ -import { ImpressionDTO } from '../../../types'; +import SplitIO from '../../../../types/splitio'; export const impression1 = { feature: 'qc_team', @@ -7,7 +7,7 @@ export const impression1 = { time: Date.now(), bucketingKey: 'impr_bucketing_2', label: 'default rule' -} as ImpressionDTO; +} as SplitIO.ImpressionDTO; export const impression2 = { feature: 'qc_team_2', keyName: 'emma@split.io', @@ -15,4 +15,4 @@ export const impression2 = { time: Date.now(), bucketingKey: 'impr_bucketing_2', label: 'default rule' -} as ImpressionDTO; +} as SplitIO.ImpressionDTO; diff --git a/src/trackers/strategy/strategyDebug.ts b/src/trackers/strategy/strategyDebug.ts index c6d29e8d..e8341152 100644 --- a/src/trackers/strategy/strategyDebug.ts +++ b/src/trackers/strategy/strategyDebug.ts @@ -1,4 +1,4 @@ -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IImpressionObserver } from '../impressionObserver/types'; import { IStrategy } from '../types'; @@ -13,7 +13,7 @@ export function strategyDebugFactory( ): IStrategy { return { - process(impressions: ImpressionDTO[]) { + process(impressions: SplitIO.ImpressionDTO[]) { impressions.forEach((impression) => { // Adds previous time if it is enabled impression.pt = impressionsObserver.testAndSet(impression); diff --git a/src/trackers/strategy/strategyNone.ts b/src/trackers/strategy/strategyNone.ts index 0a2e75ef..54223de7 100644 --- a/src/trackers/strategy/strategyNone.ts +++ b/src/trackers/strategy/strategyNone.ts @@ -1,5 +1,5 @@ import { IImpressionCountsCacheBase } from '../../storages/types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IStrategy, IUniqueKeysTracker } from '../types'; /** @@ -15,7 +15,7 @@ export function strategyNoneFactory( ): IStrategy { return { - process(impressions: ImpressionDTO[]) { + process(impressions: SplitIO.ImpressionDTO[]) { impressions.forEach((impression) => { const 
now = Date.now(); // Increments impression counter per featureName diff --git a/src/trackers/strategy/strategyOptimized.ts b/src/trackers/strategy/strategyOptimized.ts index ce1a9857..d94e2ab2 100644 --- a/src/trackers/strategy/strategyOptimized.ts +++ b/src/trackers/strategy/strategyOptimized.ts @@ -1,5 +1,5 @@ import { IImpressionCountsCacheBase } from '../../storages/types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { truncateTimeFrame } from '../../utils/time'; import { IImpressionObserver } from '../impressionObserver/types'; import { IStrategy } from '../types'; @@ -17,8 +17,8 @@ export function strategyOptimizedFactory( ): IStrategy { return { - process(impressions: ImpressionDTO[]) { - const impressionsToStore: ImpressionDTO[] = []; + process(impressions: SplitIO.ImpressionDTO[]) { + const impressionsToStore: SplitIO.ImpressionDTO[] = []; impressions.forEach((impression) => { impression.pt = impressionsObserver.testAndSet(impression); diff --git a/src/trackers/types.ts b/src/trackers/types.ts index dfb01c5e..db6d5bcb 100644 --- a/src/trackers/types.ts +++ b/src/trackers/types.ts @@ -1,4 +1,4 @@ -import { SplitIO, ImpressionDTO } from '../types'; +import SplitIO from '../../types/splitio'; import { StreamingEventType, Method, OperationType, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { IEventsCacheBase } from '../storages/types'; import { NetworkError } from '../services/types'; @@ -18,7 +18,7 @@ export interface IImpressionsHandler { } export interface IImpressionsTracker { - track(impressions: ImpressionDTO[], attributes?: SplitIO.Attributes): void + track(impressions: SplitIO.ImpressionDTO[], attributes?: SplitIO.Attributes): void } /** Telemetry tracker */ @@ -71,11 +71,11 @@ export interface IUniqueKeysTracker { } export interface IStrategyResult { - impressionsToStore: ImpressionDTO[], - impressionsToListener: ImpressionDTO[], + impressionsToStore: SplitIO.ImpressionDTO[], + impressionsToListener: SplitIO.ImpressionDTO[], deduped: number } export interface IStrategy { - process(impressions: ImpressionDTO[]): IStrategyResult + process(impressions: SplitIO.ImpressionDTO[]): IStrategyResult } diff --git a/src/types.ts b/src/types.ts index d221df57..ef91321c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,410 +1,25 @@ -/* eslint-disable no-use-before-define */ +import SplitIO from '../types/splitio'; import { ISplitFiltersValidation } from './dtos/types'; -import { IIntegration, IIntegrationFactoryParams } from './integrations/types'; import { ILogger } from './logger/types'; -import { IStorageFactoryParams, IStorageSync, IStorageAsync, IStorageSyncFactory, IStorageAsyncFactory } from './storages/types'; /** - * Reduced version of NodeJS.EventEmitter interface with the minimal methods used by the SDK - * @see {@link https://nodejs.org/api/events.html} + * SplitIO.ISettings interface extended with private properties for internal use */ -export interface IEventEmitter { - addListener(event: string, listener: (...args: any[]) => void): this; - on(event: string, listener: (...args: any[]) => void): this - once(event: string, listener: (...args: any[]) => void): this - removeListener(event: string, listener: (...args: any[]) => void): this; - off(event: string, listener: (...args: any[]) => void): this; - removeAllListeners(event?: string): this - emit(event: string, ...args: any[]): boolean -} - -/** - * impression DTO generated by the Sdk client when processing evaluations - */ -export type ImpressionDTO = 
{ - feature: string, - keyName: string, - treatment: string, - time: number, - bucketingKey?: string, - label: string, - changeNumber: number, - pt?: number, -} - -/** splitio.d.ts */ - -/** - * @typedef {Object} EventConsts - * @property {string} SDK_READY The ready event. - * @property {string} SDK_READY_FROM_CACHE The ready event when fired with cached data. - * @property {string} SDK_READY_TIMED_OUT The timeout event. - * @property {string} SDK_UPDATE The update event. - */ -type EventConsts = { - SDK_READY: 'init::ready', - SDK_READY_FROM_CACHE: 'init::cache-ready', - SDK_READY_TIMED_OUT: 'init::timeout', - SDK_UPDATE: 'state::update' -}; -/** - * SDK Modes. - * @typedef {string} SDKMode - */ -export type SDKMode = 'standalone' | 'consumer' | 'localhost' | 'consumer_partial'; -/** - * User consent status. - * @typedef {string} ConsentStatus - */ -export type ConsentStatus = 'GRANTED' | 'DECLINED' | 'UNKNOWN'; -/** - * Settings interface. This is a representation of the settings the SDK expose, that's why - * most of it's props are readonly. Only features should be rewritten when localhost mode is active. - * @interface ISettings - * - * NOTE: same ISettings interface from public type declarations extended with private properties. - */ -export interface ISettings { - readonly core: { - authorizationKey: string, - key: SplitIO.SplitKey, - labelsEnabled: boolean, - IPAddressesEnabled: boolean - }, - readonly mode: SDKMode, - readonly scheduler: { - featuresRefreshRate: number, - impressionsRefreshRate: number, - impressionsQueueSize: number, - /** - * @deprecated - */ - metricsRefreshRate?: number, - telemetryRefreshRate: number, - segmentsRefreshRate: number, - offlineRefreshRate: number, - eventsPushRate: number, - eventsQueueSize: number, - pushRetryBackoffBase: number - }, - readonly startup: { - readyTimeout: number, - requestTimeoutBeforeReady: number, - retriesOnFailureBeforeReady: number, - eventsFirstPushWindow: number - }, - readonly storage: IStorageSyncFactory | IStorageAsyncFactory, - readonly integrations: Array<{ - readonly type: string, - (params: IIntegrationFactoryParams): IIntegration | void - }>, - readonly urls: { - events: string, - sdk: string, - auth: string, - streaming: string, - telemetry: string - }, - readonly debug: boolean | LogLevel | ILogger, - readonly version: string, - features: SplitIO.MockedFeaturesFilePath | SplitIO.MockedFeaturesMap, - readonly streamingEnabled: boolean, - readonly sync: { - splitFilters: SplitIO.SplitFilter[], - impressionsMode: SplitIO.ImpressionsMode, - __splitFiltersValidation: ISplitFiltersValidation, - enabled: boolean, - flagSpecVersion: string, - requestOptions?: { - getHeaderOverrides?: (context: { headers: Record }) => Record - } - }, +export interface ISettings extends SplitIO.ISettings { + readonly sync: SplitIO.ISettings['sync'] & { + __splitFiltersValidation: ISplitFiltersValidation; + }; readonly runtime: { - ip: string | false - hostname: string | false - }, - readonly log: ILogger - readonly impressionListener?: unknown - readonly userConsent?: ConsentStatus -} -/** - * Log levels. - * @typedef {string} LogLevel - */ -export type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'; -/** - * Logger API - * @interface ILoggerAPI - */ -export interface ILoggerAPI { - /** - * Enables SDK logging to the console. - * @function enable - * @returns {void} - */ - enable(): void, - /** - * Disables SDK logging. - * @function disable - * @returns {void} - */ - disable(): void, - /** - * Sets a log level for the SDK logs. 
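The ImpressionDTO shape being relocated above is also what the impression observer hunks earlier in this section hash: buildKey concatenates keyName, feature, treatment, label and changeNumber, and testAndSet returns the previous timestamp that the strategies assign to pt. A runnable sketch of that dedup key plus a tiny last-seen map; the SDK's real observer uses an LRUCache and murmur3 hashes instead of a plain Map:

interface ImpressionDTOLike {   // field list mirrors the relocated ImpressionDTO shape
  feature: string;
  keyName: string;
  treatment: string;
  time: number;
  bucketingKey?: string;
  label: string;
  changeNumber: number;
  pt?: number;
}

// Same concatenation as the buildKey helper touched by this series.
function buildKey(i: ImpressionDTOLike): string {
  return `${i.keyName}:${i.feature}:${i.treatment}:${i.label}:${i.changeNumber}`;
}

// Tiny "previous time" tracker: returns the last time an equivalent impression was seen.
class LastSeenObserver {
  private seen = new Map<string, number>();

  testAndSet(impression: ImpressionDTOLike): number | undefined {
    const key = buildKey(impression);
    const previous = this.seen.get(key);
    this.seen.set(key, impression.time);
    return previous;
  }
}

const observer = new LastSeenObserver();
const imp: ImpressionDTOLike = { feature: 'f', keyName: 'k', treatment: 'on', time: 1000, label: 'default rule', changeNumber: 1 };
console.log(observer.testAndSet(imp));                    // undefined: first time seen
console.log(observer.testAndSet({ ...imp, time: 2000 })); // 1000: previous time, assignable to pt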
- * @function setLogLevel - * @returns {void} - */ - setLogLevel(logLevel: LogLevel): void, - /** - * Log level constants. Use this to pass them to setLogLevel function. - */ - LogLevel: { - [level: string]: LogLevel - } -} -/** - * Common settings between Browser and NodeJS settings interface. - * @interface ISharedSettings - */ -interface ISharedSettings { - /** - * Whether the logger should be enabled or disabled by default. - * @property {Boolean} debug - * @default false - */ - debug?: boolean, - /** - * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, - * which will check for the logImpression method. - * @property {IImpressionListener} impressionListener - * @default undefined - */ - impressionListener?: SplitIO.IImpressionListener, - /** - * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, - * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. - * @property {boolean} streamingEnabled - * @default true - */ - streamingEnabled?: boolean, - /** - * SDK synchronization settings. - * @property {Object} sync - */ - sync?: { - /** - * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. - * This configuration is only meaningful when the SDK is working in "standalone" mode. - * - * Example: - * `splitFilter: [ - * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' - * ]` - * @property {SplitIO.SplitFilter[]} splitFilters - */ - splitFilters?: SplitIO.SplitFilter[] - /** - * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. - * Possible values are 'DEBUG' and 'OPTIMIZED'. - * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). - * - OPTIMIZED: will send unique impressions to Split servers avoiding a considerable amount of traffic that duplicated impressions could generate. - * @property {String} impressionsMode - * @default 'OPTIMIZED' - */ - impressionsMode?: SplitIO.ImpressionsMode, - /** - * Enables synchronization. - * @property {boolean} enabled - */ - enabled?: boolean, - requestOptions?: { - getHeaderOverrides?: (context: { headers: Record }) => Record - }, - } -} -/** - * Common settings interface for SDK instances on NodeJS. - * @interface INodeBasicSettings - * @extends ISharedSettings - */ -interface INodeBasicSettings extends ISharedSettings { - /** - * SDK Startup settings for NodeJS. - * @property {Object} startup - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 15 - */ - readyTimeout?: number, - /** - * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * @property {number} requestTimeoutBeforeReady - * @default 15 - */ - requestTimeoutBeforeReady?: number, - /** - * How many quick retries we will do while starting up the SDK. 
- * @property {number} retriesOnFailureBeforeReady - * @default 1 - */ - retriesOnFailureBeforeReady?: number, - /** - * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. - * - * @property {number} eventsFirstPushWindow - * @default 0 - */ - eventsFirstPushWindow?: number, - }, - /** - * SDK scheduler settings. - * @property {Object} scheduler - */ - scheduler?: { - /** - * The SDK polls Split servers for changes to feature roll-out plans. This parameter controls this polling period in seconds. - * @property {number} featuresRefreshRate - * @default 5 - */ - featuresRefreshRate?: number, - /** - * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * @property {number} impressionsRefreshRate - * @default 300 - */ - impressionsRefreshRate?: number, - /** - * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} impressionsQueueSize - * @default 30000 - */ - impressionsQueueSize?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} metricsRefreshRate - * @default 120 - * @deprecated This parameter is ignored now. - */ - metricsRefreshRate?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} telemetryRefreshRate - * @default 3600 - */ - telemetryRefreshRate?: number, - /** - * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * @property {number} segmentsRefreshRate - * @default 60 - */ - segmentsRefreshRate?: number, - /** - * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * @property {number} eventsPushRate - * @default 60 - */ - eventsPushRate?: number, - /** - * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} eventsQueueSize - * @default 500 - */ - eventsQueueSize?: number, - /** - * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {number} offlineRefreshRate - * @default 15 - */ - offlineRefreshRate?: number - /** - * When using streaming mode, seconds to wait before re attempting to connect for push notifications. - * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * @property {number} pushRetryBackoffBase - * @default 1 - */ - pushRetryBackoffBase?: number, - }, - /** - * SDK Core settings for NodeJS. - * @property {Object} core - */ - core: { - /** - * Your SDK key. 
More information: @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string, - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean - /** - * Disable machine IP and Name from being sent to Split backend. - * @property {boolean} IPAddressesEnabled - * @default true - */ - IPAddressesEnabled?: boolean - }, - /** - * Defines which kind of storage we should instantiate. - * @property {Object} storage - */ - storage?: (params: any) => any, - /** - * The SDK mode. Possible values are "standalone" (which is the default) and "consumer". For "localhost" mode, use "localhost" as authorizationKey. - * @property {SDKMode} mode - * @default 'standalone' - */ - mode?: SDKMode, - /** - * Mocked features file path. For testing purposses only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {MockedFeaturesFilePath} features - * @default '$HOME/.split' - */ - features?: SplitIO.MockedFeaturesFilePath, + ip: string | false; + hostname: string | false; + }; + readonly log: ILogger; } + /** - * Common API for entities that expose status handlers. - * @interface IStatusInterface - * @extends IEventEmitter + * SplitIO.IStatusInterface interface extended with private properties for internal use */ -export interface IStatusInterface extends IEventEmitter { - /** - * Constant object containing the SDK events for you to use. - * @property {EventConsts} Event - */ - Event: EventConsts, - /** - * Returns a promise that will be resolved once the SDK has finished loading (SDK_READY event emitted) or rejected if the SDK has timedout (SDK_READY_TIMED_OUT event emitted). - * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, calling the `ready` method after the - * SDK had timed out will return a new promise that should eventually resolve if the SDK gets ready. - * - * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises. - * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example: - * ``` - * try { - * await client.ready().catch((e) => { throw e; }); - * // SDK is ready - * } catch(e) { - * // SDK has timedout - * } - * ``` - * - * @function ready - * @returns {Promise} - */ - ready(): Promise - +export interface IStatusInterface extends SplitIO.IStatusInterface { // Expose status for internal purposes only. Not considered part of the public API, and might be updated eventually. __getStatus(): { isReady: boolean; @@ -414,984 +29,56 @@ export interface IStatusInterface extends IEventEmitter { isDestroyed: boolean; isOperational: boolean; lastUpdate: number; - } + }; } /** - * Common definitions between clients for different environments interface. 
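The new ISettings and IStatusInterface above both follow the same pattern: extend the public SplitIO interface and layer internal-only members on top (an intersected sync object, __getStatus). A compact sketch of that pattern with invented names; only the __splitFiltersValidation field name mirrors the patch, its type here is a stand-in for ISplitFiltersValidation:

// Public-facing shape (stand-in for what the declaration file exposes).
interface PublicSettings {
  readonly sync: { enabled: boolean; splitFilters: Array<{ type: string; values: string[] }> };
}

// Internal shape: same public surface plus private bookkeeping, like SplitIO.ISettings['sync'] & { ... } above.
interface InternalSettings extends PublicSettings {
  readonly sync: PublicSettings['sync'] & {
    __splitFiltersValidation: { validFilters: string[] }; // hypothetical stand-in type
  };
}

const settings: InternalSettings = {
  sync: {
    enabled: true,
    splitFilters: [{ type: 'byName', values: ['my_flag'] }],
    __splitFiltersValidation: { validFilters: ['my_flag'] },
  },
};

// Modules that only know the public type still accept the internal object.
function describeSync(s: PublicSettings): string { return `sync enabled: ${s.sync.enabled}`; }
console.log(describeSync(settings));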
- * @interface IBasicClient - * @extends IStatusInterface + * SplitIO.IBasicClient interface extended with private properties for internal use */ -export interface IBasicClient extends IStatusInterface { +export interface IBasicClient extends SplitIO.IBasicClient { /** * Flush data * @function flush * @return {Promise} */ - flush(): Promise - /** - * Destroy the client instance. - * @function destroy - * @returns {Promise} - */ - destroy(): Promise - - // Whether the client implements the client-side API, i.e, with bound key, (true), or the server-side API (false). + flush(): Promise; + // Whether the client implements the client-side API, i.e, with bound key, (true), or the server-side API (false/undefined). // Exposed for internal purposes only. Not considered part of the public API, and might be renamed eventually. - isClientSide: boolean + isClientSide?: boolean; + key?: SplitIO.SplitKey; } /** - * Common definitions between SDK instances for different environments interface. - * @interface IBasicSDK + * Defines the format of rollout plan data to preload the factory storage (cache). */ -interface IBasicSDK { - /** - * Current settings of the SDK instance. - * @property settings - */ - settings: ISettings, - /** - * Logger API. - * @property Logger - */ - Logger: ILoggerAPI - /** - * Destroy all the clients created by this factory. - * @function destroy - * @returns {Promise} - */ - destroy(): Promise -} -/****** Exposed namespace ******/ -/** - * Types and interfaces for @splitsoftware/splitio package for usage when integrating javascript sdk on typescript apps. - * For the SDK package information - * @see {@link https://www.npmjs.com/package/@splitsoftware/splitio} - */ -export namespace SplitIO { - /** - * Feature flag treatment value, returned by getTreatment. - * @typedef {string} Treatment - */ - export type Treatment = string; - /** - * Feature flag treatment promise that will resolve to actual treatment value. - * @typedef {Promise} AsyncTreatment - */ - export type AsyncTreatment = Promise; - /** - * An object with the treatments for a bulk of feature flags, returned by getTreatments. For example: - * { - * feature1: 'on', - * feature2: 'off - * } - * @typedef {Object.} Treatments - */ - export type Treatments = { - [featureName: string]: Treatment - }; - /** - * Feature flag treatments promise that will resolve to the actual SplitIO.Treatments object. - * @typedef {Promise} AsyncTreatments - */ - export type AsyncTreatments = Promise; - /** - * Feature flag evaluation result with treatment and configuration, returned by getTreatmentWithConfig. - * @typedef {Object} TreatmentWithConfig - * @property {string} treatment The treatment string - * @property {string | null} config The stringified version of the JSON config defined for that treatment, null if there is no config for the resulting treatment. - */ - export type TreatmentWithConfig = { - treatment: string, - config: string | null - }; - /** - * Feature flag treatment promise that will resolve to actual treatment with config value. - * @typedef {Promise} AsyncTreatmentWithConfig - */ - export type AsyncTreatmentWithConfig = Promise; - /** - * An object with the treatments with configs for a bulk of feature flags, returned by getTreatmentsWithConfig. - * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. 
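PreloadedData above keeps the shape it had under the SplitIO namespace but is now exported directly from src/types, which is why dataLoader.ts earlier switches to importing { PreloadedData } from '../types'. A hedged example literal for that shape; the stringified flag definition below is a trimmed placeholder, not a complete Split DTO:

interface PreloadedDataLike {    // stand-in mirroring the fields declared above
  lastUpdated: number;
  since: number;
  splitsData: { [splitName: string]: string };
  mySegmentsData?: { [key: string]: string[] };
  segmentsData?: { [segmentName: string]: string };
}

const preloaded: PreloadedDataLike = {
  lastUpdated: Date.now(),
  since: 1234567890,                       // change number the snapshot corresponds to
  splitsData: {
    // Values are stringified definitions; this one is a placeholder, not a real flag DTO.
    onboarding_banner: JSON.stringify({ name: 'onboarding_banner', status: 'ACTIVE' }),
  },
  mySegmentsData: {
    'user-1': ['beta_testers'],            // segments the given key belongs to
  },
};

// Per the docs above, a loader should only apply the snapshot while it is fresh enough (10-day policy).
const TEN_DAYS_MS = 10 * 24 * 60 * 60 * 1000;
const isFresh = Date.now() - preloaded.lastUpdated < TEN_DAYS_MS;
console.log(isFresh ? 'snapshot usable' : 'snapshot expired');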
For example: - * { - * feature1: { treatment: 'on', config: null } - * feature2: { treatment: 'off', config: '{"bannerText":"Click here."}' } - * } - * @typedef {Object.} Treatments - */ - export type TreatmentsWithConfig = { - [featureName: string]: TreatmentWithConfig - }; - /** - * Feature flag treatments promise that will resolve to the actual SplitIO.TreatmentsWithConfig object. - * @typedef {Promise} AsyncTreatmentsWithConfig - */ - export type AsyncTreatmentsWithConfig = Promise; +export interface PreloadedData { /** - * Possible Split SDK events. - * @typedef {string} Event + * Timestamp of the last moment the data was synchronized with Split servers. + * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. + * @TODO configurable expiration time policy? */ - export type Event = 'init::timeout' | 'init::ready' | 'init::cache-ready' | 'state::update'; + lastUpdated: number; /** - * Attributes should be on object with values of type string or number (dates should be sent as millis since epoch). - * @typedef {Object.} Attributes - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#attribute-syntax} + * Change number of the preloaded data. + * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. */ - export type Attributes = { - [attributeName: string]: AttributeType - }; + since: number; /** - * Type of an attribute value - * @typedef {string | number | boolean | Array} AttributeType + * Map of feature flags to their stringified definitions. */ - export type AttributeType = string | number | boolean | Array; - /** - * Properties should be an object with values of type string, number, boolean or null. Size limit of ~31kb. - * @typedef {Object.} Attributes - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#track - */ - export type Properties = { - [propertyName: string]: string | number | boolean | null + splitsData: { + [splitName: string]: string; }; /** - * The SplitKey object format. - * @typedef {Object.} SplitKeyObject - */ - export type SplitKeyObject = { - matchingKey: string, - bucketingKey: string - }; - /** - * The customer identifier. Could be a SplitKeyObject or a string. - * @typedef {SplitKeyObject|string} SplitKey - */ - export type SplitKey = SplitKeyObject | string; - /** - * Path to file with mocked features (for node). - * @typedef {string} MockedFeaturesFilePath - */ - export type MockedFeaturesFilePath = string; - /** - * Object with mocked features mapping (for browser). We need to specify the featureName as key, and the mocked treatment as value. - * @typedef {Object} MockedFeaturesMap - */ - export type MockedFeaturesMap = { - [featureName: string]: string | TreatmentWithConfig - }; - /** - * Object with information about an impression. It contains the generated impression DTO as well as - * complementary information around where and how it was generated in that way. - * @typedef {Object} ImpressionData - */ - export type ImpressionData = { - impression: ImpressionDTO, - attributes?: SplitIO.Attributes, - ip: string | false, - hostname: string | false, - sdkLanguageVersion: string - }; - /** - * Data corresponding to one feature flag view. - * @typedef {Object} SplitView - */ - export type SplitView = { - /** - * The name of the feature flag. - * @property {string} name - */ - name: string, - /** - * The traffic type of the feature flag. 
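The SplitKey declarations being removed above (a plain string or a { matchingKey, bucketingKey } object) are the same shape the client-side factory normalizes through getMatching and keyParser in the sdkClientMethodCS hunk earlier. Illustrative stand-ins for that normalization, since the actual utils/key implementations are not shown in this patch:

type SplitKeyObject = { matchingKey: string; bucketingKey: string };
type SplitKey = string | SplitKeyObject;

// Returns the key used for targeting; plain strings are their own matching key.
function getMatching(key: SplitKey): string {
  return typeof key === 'string' ? key : key.matchingKey;
}

// Normalizes any accepted key into the object form (string keys bucket on themselves here).
function keyParser(key: SplitKey): SplitKeyObject {
  return typeof key === 'string' ? { matchingKey: key, bucketingKey: key } : key;
}

console.log(getMatching('user-1'));                                           // 'user-1'
console.log(keyParser({ matchingKey: 'user-1', bucketingKey: 'account-9' })); // keeps both parts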
- * @property {string} trafficType - */ - trafficType: string, - /** - * Whether the feature flag is killed or not. - * @property {boolean} killed - */ - killed: boolean, - /** - * The list of treatments available for the feature flag. - * @property {Array} treatments - */ - treatments: Array, - /** - * Current change number of the feature flag. - * @property {number} changeNumber - */ - changeNumber: number, - /** - * Map of configurations per treatment. - * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. - * @property {Object.} configs - */ - configs: { - [treatmentName: string]: string - }, - /** - * List of sets of the feature flag. - * @property {string[]} sets - */ - sets: string[], - /** - * The default treatment of the feature flag. - * @property {string} defaultTreatment - */ - defaultTreatment: string, - }; - /** - * A promise that resolves to a feature flag view. - * @typedef {Promise} SplitView - */ - export type SplitViewAsync = Promise; - /** - * An array containing the SplitIO.SplitView elements. - */ - export type SplitViews = Array; - /** - * A promise that resolves to an SplitIO.SplitViews array. - * @typedef {Promise} SplitViewsAsync + * Optional map of user keys to their list of segments. + * @TODO remove when releasing first version */ - export type SplitViewsAsync = Promise; - /** - * An array of feature flag names. - * @typedef {Array} SplitNames - */ - export type SplitNames = Array; - /** - * A promise that resolves to an array of feature flag names. - * @typedef {Promise} SplitNamesAsync - */ - export type SplitNamesAsync = Promise; - /** - * Impression listener interface. This is the interface that needs to be implemented - * by the element you provide to the SDK as impression listener. - * @interface IImpressionListener - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#listener} - */ - export interface IImpressionListener { - logImpression(data: SplitIO.ImpressionData): void - } - /** - * Object with information about a Split event. - * @typedef {Object} EventData - */ - export type EventData = { - eventTypeId: string; - value?: number; - properties?: Properties; - trafficTypeName?: string; - key?: string; // matching user key - timestamp: number; + mySegmentsData?: { + [key: string]: string[]; }; /** - * Object representing the data sent by Split (events and impressions). - * @typedef {Object} IntegrationData - * @property {string} type The type of Split data, either 'IMPRESSION' or 'EVENT'. - * @property {ImpressionData | EventData} payload The data instance itself. - */ - export type IntegrationData = { type: 'IMPRESSION', payload: SplitIO.ImpressionData } | { type: 'EVENT', payload: SplitIO.EventData }; - /** - * Available URL settings for the SDKs. + * Optional map of segments to their stringified definitions. + * This property is ignored if `mySegmentsData` was provided. */ - export type UrlSettings = { - /** - * String property to override the base URL where the SDK will get rollout plan related data, like feature flags and segments definitions. - * @property {string} sdk - * @default 'https://sdk.split.io/api' - */ - sdk?: string, - /** - * String property to override the base URL where the SDK will post event-related information like impressions. 
- * @property {string} events - * @default 'https://events.split.io/api' - */ - events?: string, - /** - * String property to override the base URL where the SDK will get authorization tokens to be used with functionality that requires it, like streaming. - * @property {string} auth - * @default 'https://auth.split.io/api' - */ - auth?: string, - /** - * String property to override the base URL where the SDK will connect to receive streaming updates. - * @property {string} streaming - * @default 'https://streaming.split.io' - */ - streaming?: string, - /** - * String property to override the base URL where the SDK will post telemetry data. - * @property {string} telemetry - * @default 'https://telemetry.split.io/api' - */ - telemetry?: string + segmentsData?: { + [segmentName: string]: string; }; - /** - * SplitFilter type. - * @typedef {string} SplitFilterType - */ - export type SplitFilterType = 'byName' | 'byPrefix' | 'bySet'; - /** - * Defines a feature flag filter, described by a type and list of values. - */ - export interface SplitFilter { - /** - * Type of the filter. - * @property {SplitFilterType} type - */ - type: SplitFilterType, - /** - * List of values: feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. - * @property {string[]} values - */ - values: string[], - } - /** - * ImpressionsMode type - * @typedef {string} ImpressionsMode - */ - export type ImpressionsMode = 'OPTIMIZED' | 'DEBUG' | 'NONE' - /** - * Defines the format of rollout plan data to preload on the factory storage (cache). - */ - export interface PreloadedData { - /** - * Timestamp of the last moment the data was synchronized with Split servers. - * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. - * @TODO configurable expiration time policy? - */ - lastUpdated: number, - /** - * Change number of the preloaded data. - * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. - */ - since: number, - /** - * Map of feature flags to their stringified definitions. - */ - splitsData: { - [splitName: string]: string - }, - /** - * Optional map of user keys to their list of segments. - * @TODO remove when releasing first version - */ - mySegmentsData?: { - [key: string]: string[] - }, - /** - * Optional map of segments to their stringified definitions. - * This property is ignored if `mySegmentsData` was provided. - */ - segmentsData?: { - [segmentName: string]: string - }, - } - /** - * Settings interface for SDK instances created on the browser - * @interface IBrowserSettings - * @extends ISharedSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} - */ - export interface IBrowserSettings extends ISharedSettings { - /** - * SDK Startup settings for the Browser. - * @property {Object} startup - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 1.5 - */ - readyTimeout?: number, - /** - * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * @property {number} requestTimeoutBeforeReady - * @default 1.5 - */ - requestTimeoutBeforeReady?: number, - /** - * How many quick retries we will do while starting up the SDK. 
- * @property {number} retriesOnFailureBeforeReady - * @default 1 - */ - retriesOnFailureBeforeReady?: number, - /** - * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. - * - * @property {number} eventsFirstPushWindow - * @default 10 - */ - eventsFirstPushWindow?: number, - }, - /** - * SDK scheduler settings. - * @property {Object} scheduler - */ - scheduler?: { - /** - * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. - * @property {number} featuresRefreshRate - * @default 60 - */ - featuresRefreshRate?: number, - /** - * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * @property {number} impressionsRefreshRate - * @default 300 - */ - impressionsRefreshRate?: number, - /** - * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} impressionsQueueSize - * @default 30000 - */ - impressionsQueueSize?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} metricsRefreshRate - * @default 120 - * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. - */ - metricsRefreshRate?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} telemetryRefreshRate - * @default 3600 - */ - telemetryRefreshRate?: number, - /** - * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * @property {number} segmentsRefreshRate - * @default 60 - */ - segmentsRefreshRate?: number, - /** - * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * @property {number} eventsPushRate - * @default 60 - */ - eventsPushRate?: number, - /** - * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} eventsQueueSize - * @default 500 - */ - eventsQueueSize?: number, - /** - * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {number} offlineRefreshRate - * @default 15 - */ - offlineRefreshRate?: number - /** - * When using streaming mode, seconds to wait before re attempting to connect for push notifications. - * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * @property {number} pushRetryBackoffBase - * @default 1 - */ - pushRetryBackoffBase?: number, - }, - /** - * SDK Core settings for the browser. - * @property {Object} core - */ - core: { - /** - * Your SDK key. 
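Putting the startup, scheduler, urls and filter options documented above together, a browser configuration overriding a few defaults might look like the following sketch (the SDK key, proxy URL and flag set name are placeholders; the sync block mirrors the shared settings shown later in ISettings):

const browserSettings: SplitIO.IBrowserSettings = {
  core: {
    authorizationKey: '<YOUR_CLIENT_SIDE_SDK_KEY>', // placeholder
    key: 'user_id_1'
  },
  startup: {
    readyTimeout: 5,                 // seconds before SDK_READY_TIMED_OUT is emitted
    requestTimeoutBeforeReady: 1.5,
    retriesOnFailureBeforeReady: 1
  },
  scheduler: {
    featuresRefreshRate: 30,         // default 60
    eventsPushRate: 60,
    impressionsQueueSize: 30000
  },
  sync: {
    splitFilters: [{ type: 'bySet', values: ['frontend'] }],
    impressionsMode: 'OPTIMIZED'
  },
  urls: {
    sdk: 'https://my-split-proxy.example.com/api' // e.g. when routing through the Split proxy
  }
};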
More information: @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string, - /** - * Customer identifier. Whatever this means to you. @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @property {SplitKey} key - */ - key: SplitKey, - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean - }, - /** - * Mocked features map. For testing purposses only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - */ - features?: MockedFeaturesMap, - /** - * Defines which kind of storage we should instantiate. - * @property {Object} storage - */ - storage?: (params: IStorageFactoryParams) => IStorageSync | IStorageAsync, - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls - */ - urls?: UrlSettings, - } - /** - * Settings interface for SDK instances created on NodeJS. - * If your storage is asynchronous (Redis for example) use SplitIO.INodeAsyncSettings instead. - * @interface INodeSettings - * @extends INodeBasicSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} - */ - export interface INodeSettings extends INodeBasicSettings { - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls - */ - urls?: UrlSettings, - /** - * Defines which kind of storage we should instantiate. - * @property {Object} storage - */ - storage?: (params: IStorageFactoryParams) => IStorageSync, - } - /** - * Settings interface with async storage for SDK instances created on NodeJS. - * If your storage is synchronous (by defaut we use memory, which is sync) use SplitIO.INodeSyncSettings instead. - * @interface INodeAsyncSettings - * @extends INodeBasicSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} - */ - export interface INodeAsyncSettings extends INodeBasicSettings { - storage?: (params: IStorageFactoryParams) => IStorageAsync, - } - /** - * This represents the interface for the Server-side SDK instance with synchronous storage. - * @interface ISDK - * @extends IBasicSDK - */ - export interface ISDK extends IBasicSDK { - /** - * Returns the client instance of the SDK. - * @function client - * @returns {IClient} The client instance. - */ - client(): IClient, - /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. - */ - manager(): IManager - } - /** - * This represents the interface for the Server-side SDK instance with asynchronous storage. - * @interface IAsyncSDK - * @extends IBasicSDK - */ - export interface IAsyncSDK extends IBasicSDK { - /** - * Returns the default client instance of the SDK. - * @function client - * @returns {IAsyncClient} The asynchronous client instance. 
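The features property above is what enables localhost mode on the browser; a minimal sketch (flag names, treatments and the config string are illustrative):

const localhostSettings: SplitIO.IBrowserSettings = {
  core: {
    authorizationKey: 'localhost', // switches the SDK to the mocked-features mode
    key: 'user_id_1'
  },
  // MockedFeaturesMap: plain treatment strings or TreatmentWithConfig objects
  features: {
    feature1: 'on',
    feature2: { treatment: 'off', config: '{"bannerText":"Click here."}' }
  }
};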
- */ - client(): IAsyncClient, - /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. - */ - manager(): IAsyncManager - } - /** - * This represents the interface for the Client-side SDK instance with synchronous storage. - * @interface ICsSDK - * @extends IBasicSDK - */ - export interface ICsSDK extends IBasicSDK { - /** - * Returns the default client instance of the SDK, with the key and optional traffic type from settings. - * @function client - * @returns {ICsClient} The client instance. - */ - client(): ICsClient, - /** - * Returns a shared client of the SDK, with the given key and optional traffic type. - * @function client - * @param {SplitKey} key The key for the new client instance. - * @returns {ICsClient} The client instance. - */ - client(key: SplitKey): ICsClient, - /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. - */ - manager(): IManager - } - /** - * This represents the interface for the Client instance with synchronous storage for server-side SDK, where we don't have only one key. - * @interface IClient - * @extends IBasicClient - */ - export interface IClient extends IBasicClient { - /** - * Returns a Treatment value, which is the treatment string for the given feature. - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. - */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment, - /** - * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the - * configuration stringified JSON (or null if there was no config for that treatment). - */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the given features. - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. - */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. 
- * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flagSet. - * @function getTreatmentsByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flagSet name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flagSets. - * @function getTreatmentsWithConfigByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flagSet name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flagSets. - * @function getTreatmentsByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flagSet names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flagSets. - * @function getTreatmentsWithConfigByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flagSet names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Tracks an event to be fed to the results product on Split user interface. - * @function track - * @param {SplitKey} key - The key that identifies the entity related to this event. - * @param {string} trafficType - The traffic type of the entity related to this event. - * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. 
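A usage sketch of the keyed, server-side IClient methods documented above (the factory instance is assumed to exist elsewhere; flag, flag set and attribute names are placeholders):

declare const serverSdk: SplitIO.ISDK; // created elsewhere by the server-side SDK

const client = serverSdk.client();

// Single evaluation with attributes for the given key
const treatment: SplitIO.Treatment =
  client.getTreatment('user_id_1', 'new_checkout', { plan: 'premium', age: 34 });

// Evaluate every feature flag that belongs to a flag set
const bySet: SplitIO.Treatments = client.getTreatmentsByFlagSet('user_id_1', 'backend_flags');

// Treatment plus its configuration (a stringified JSON or null)
const { treatment: checkoutTreatment, config } = client.getTreatmentWithConfig(
  { matchingKey: 'user_id_1', bucketingKey: 'bucket_1' },
  'new_checkout'
);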
- * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} Whether the event was added to the queue successfully or not. - */ - track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean, - } - /** - * This represents the interface for the Client instance with asynchronous storage for server-side SDK, where we don't have only one key. - * @interface IAsyncClient - * @extends IBasicClient - */ - export interface IAsyncClient extends IBasicClient { - /** - * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. - */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment, - /** - * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. - */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig, - /** - * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. - * For usage on NodeJS as we don't have only one key. - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. - */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments, - /** - * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. - */ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig, - /** - * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the features related to the given flag set. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments, - /** - * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for features related to the given flag set. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsWithConfigByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig, - /** - * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the feature flags related to the given flag sets. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments, - /** - * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the feature flags related to the given flag sets. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsWithConfigByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig, - /** - * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). - * @function track - * @param {SplitKey} key - The key that identifies the entity related to this event. - * @param {string} trafficType - The traffic type of the entity related to this event. 
- * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {Promise} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. - */ - track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise - } - /** - * This represents the interface for the Client instance with synchronous storage for client-side SDK, where each client has associated a key and optionally a traffic type. - * @interface IClient - * @extends IBasicClient - */ - export interface ICsClient extends IBasicClient { - /** - * Returns a Treatment value, which is the treatment string for the given feature. - * @function getTreatment - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. - */ - getTreatment(featureFlagName: string, attributes?: Attributes): Treatment, - /** - * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. - * @function getTreatmentWithConfig - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentWithConfig} The map containing the treatment and the configuration stringified JSON (or null if there was no config for that treatment). - */ - getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the given features. - * @function getTreatments - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. - */ - getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * @function getTreatmentsWithConfig - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. - * @function getTreatmentsByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
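The asynchronous counterpart documented above returns promises for every evaluation and for track; a sketch assuming an IAsyncSDK instance backed by an async storage such as Redis:

declare const asyncSdk: SplitIO.IAsyncSDK; // e.g. a factory configured with a Redis storage

async function evaluateAsync() {
  const asyncClient = asyncSdk.client();

  const treatment = await asyncClient.getTreatment('user_id_1', 'new_checkout');
  const bySets = await asyncClient.getTreatmentsByFlagSets('user_id_1', ['backend_flags', 'billing']);

  // track resolves to whether the event was queued successfully
  const queued: boolean = await asyncClient.track('user_id_1', 'user', 'checkout_completed', 79.99, { currency: 'USD' });

  return { treatment, bySets, queued };
}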
- * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. - * @function getTreatmentsWithConfigByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. - * @function getTreatmentsByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. - * @function getTreatmentsWithConfigByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Tracks an event to be fed to the results product on Split user interface. - * @function track - * @param {string} trafficType - The traffic type of the entity related to this event. - * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} Whether the event was added to the queue successfully or not. - */ - track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean, - /** - * Add an attribute to client's in memory attributes storage - * - * @param {string} attributeName Attribute name - * @param {AttributeType} attributeValue Attribute value - * @returns {boolean} true if the attribute was stored and false otherwise - */ - setAttribute(attributeName: string, attributeValue: AttributeType): boolean, - /** - * Returns the attribute with the given name - * - * @param {string} attributeName Attribute name - * @returns {AttributeType} Attribute with the given name - */ - getAttribute(attributeName: string): AttributeType, - /** - * Removes from client's in memory attributes storage the attribute with the given name. - * - * @param {string} attributeName - * @returns {boolean} true if attribute was removed and false otherwise - */ - removeAttribute(attributeName: string): boolean, - /** - * Add to client's in memory attributes storage the attributes in 'attributes'. 
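Because the client-side ICsClient binds its key at creation time, the evaluation, track and attribute methods documented above take no key parameter; a sketch (flag, flag set and attribute names are placeholders):

declare const csSdk: SplitIO.ICsSDK;

const csClient = csSdk.client();            // default client, key taken from the settings
const otherClient = csSdk.client('user_2'); // shared client bound to another key

// Stored attributes are combined with any attributes passed at evaluation time
csClient.setAttribute('plan', 'premium');
csClient.setAttributes({ age: 34, beta: true });

const treatment = csClient.getTreatment('new_checkout');
const frontendTreatments = csClient.getTreatmentsByFlagSet('frontend');

// Client-side track: traffic type, event type, optional value and properties
csClient.track('user', 'page_view', undefined, { page: 'home' });

csClient.clearAttributes();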
- * - * @param {Attributes} attributes Object with attributes to store - * @returns true if attributes were stored an false otherwise - */ - setAttributes(attributes: Attributes): boolean, - /** - * Return all the attributes stored in client's in memory attributes storage. - * - * @returns {Attributes} returns all the stored attributes - */ - getAttributes(): Attributes, - /** - * Remove all the stored attributes in the client's in memory attribute storage. - * - * @returns {boolean} true if all attribute were removed and false otherwise - */ - clearAttributes(): boolean - } - /** - * Representation of a manager instance with synchronous storage of the SDK. - * @interface IManager - * @extends IStatusInterface - */ - export interface IManager extends IStatusInterface { - /** - * Get the array of feature flag names. - * @function names - * @returns {SplitNames} The list of feature flag names. - */ - names(): SplitNames, - /** - * Get the array of feature flags data in SplitView format. - * @function splits - * @returns {SplitViews} The list of SplitIO.SplitView. - */ - splits(): SplitViews, - /** - * Get the data of a split in SplitView format. - * @function split - * @param {string} featureFlagName The name of the feature flag we want to get info of. - * @returns {SplitView} The SplitIO.SplitView of the given split. - */ - split(featureFlagName: string): SplitView, - } - /** - * Representation of a manager instance with asynchronous storage of the SDK. - * @interface IAsyncManager - * @extends IStatusInterface - */ - export interface IAsyncManager extends IStatusInterface { - /** - * Get the array of feature flag names. - * @function names - * @returns {SplitNamesAsync} A promise that resolves to the list of feature flag names. - */ - names(): SplitNamesAsync, - /** - * Get the array of feature flags data in SplitView format. - * @function splits - * @returns {SplitViewsAsync} A promise that resolves to the SplitIO.SplitView list. - */ - splits(): SplitViewsAsync, - /** - * Get the data of a split in SplitView format. - * @function split - * @param {string} featureFlagName The name of the feature flag we want to get info of. - * @returns {SplitViewAsync} A promise that resolves to the SplitIO.SplitView value. - */ - split(featureFlagName: string): SplitViewAsync, - } } diff --git a/src/utils/MinEventEmitter.ts b/src/utils/MinEventEmitter.ts index a081f1e7..750ce7b9 100644 --- a/src/utils/MinEventEmitter.ts +++ b/src/utils/MinEventEmitter.ts @@ -1,5 +1,4 @@ - -import { IEventEmitter } from '../types'; +import SplitIO from '../../types/splitio'; const NEW_LISTENER_EVENT = 'newListener'; const REMOVE_LISTENER_EVENT = 'removeListener'; @@ -11,7 +10,7 @@ function checkListener(listener: unknown) { } // @TODO implement missing methods, check spec and add UTs -export class EventEmitter implements IEventEmitter { +export class EventEmitter implements SplitIO.IEventEmitter { private listeners: Record void, // the event listener diff --git a/src/utils/MinEvents.ts b/src/utils/MinEvents.ts index 71aa3626..51b57c50 100644 --- a/src/utils/MinEvents.ts +++ b/src/utils/MinEvents.ts @@ -30,7 +30,7 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -import { IEventEmitter } from '../types'; +import SplitIO from '../../types/splitio'; var R = typeof Reflect === 'object' ? 
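And a sketch of the IManager introspection methods documented above (the IAsyncManager variant exposes the same methods returning promises):

declare const sdk: SplitIO.ISDK;

const manager = sdk.manager();

const names: SplitIO.SplitNames = manager.names();    // all feature flag names
const views: SplitIO.SplitViews = manager.splits();   // SplitView for every flag
const view: SplitIO.SplitView = manager.split('new_checkout');

console.log(view.name, view.defaultTreatment, view.treatments, view.sets);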
Reflect : null; var ReflectApply = R && typeof R.apply === 'function' @@ -39,7 +39,7 @@ var ReflectApply = R && typeof R.apply === 'function' return Function.prototype.apply.call(target, receiver, args); }; -export const EventEmitter: { new(): IEventEmitter } = function EventEmitter() { +export const EventEmitter: { new(): SplitIO.IEventEmitter } = function EventEmitter() { EventEmitter.init.call(this); }; diff --git a/src/utils/constants/index.ts b/src/utils/constants/index.ts index 543aa90c..cd11790f 100644 --- a/src/utils/constants/index.ts +++ b/src/utils/constants/index.ts @@ -1,5 +1,4 @@ -import { StorageType } from '../../storages/types'; -import { SDKMode } from '../../types'; +import SplitIO from '../../../types/splitio'; // Special treatments export const CONTROL = 'control'; @@ -22,17 +21,17 @@ export const OPTIMIZED = 'OPTIMIZED'; export const NONE = 'NONE'; // SDK Modes -export const LOCALHOST_MODE: SDKMode = 'localhost'; -export const STANDALONE_MODE: SDKMode = 'standalone'; +export const LOCALHOST_MODE = 'localhost'; +export const STANDALONE_MODE = 'standalone'; export const PRODUCER_MODE = 'producer'; -export const CONSUMER_MODE: SDKMode = 'consumer'; -export const CONSUMER_PARTIAL_MODE: SDKMode = 'consumer_partial'; +export const CONSUMER_MODE = 'consumer'; +export const CONSUMER_PARTIAL_MODE = 'consumer_partial'; // Storage types -export const STORAGE_MEMORY: StorageType = 'MEMORY'; -export const STORAGE_LOCALSTORAGE: StorageType = 'LOCALSTORAGE'; -export const STORAGE_REDIS: StorageType = 'REDIS'; -export const STORAGE_PLUGGABLE: StorageType = 'PLUGGABLE'; +export const STORAGE_MEMORY: SplitIO.StorageType = 'MEMORY'; +export const STORAGE_LOCALSTORAGE: SplitIO.StorageType = 'LOCALSTORAGE'; +export const STORAGE_REDIS: SplitIO.StorageType = 'REDIS'; +export const STORAGE_PLUGGABLE: SplitIO.StorageType = 'PLUGGABLE'; // User consent export const CONSENT_GRANTED = 'GRANTED'; // The user has granted consent for tracking events and impressions diff --git a/src/utils/inputValidation/attributes.ts b/src/utils/inputValidation/attributes.ts index e9824113..a84b7e47 100644 --- a/src/utils/inputValidation/attributes.ts +++ b/src/utils/inputValidation/attributes.ts @@ -1,5 +1,5 @@ import { isObject } from '../lang'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { validateAttribute } from './attribute'; import { ERROR_NOT_PLAIN_OBJECT } from '../../logger/constants'; diff --git a/src/utils/inputValidation/eventProperties.ts b/src/utils/inputValidation/eventProperties.ts index 1fb2984e..310946cc 100644 --- a/src/utils/inputValidation/eventProperties.ts +++ b/src/utils/inputValidation/eventProperties.ts @@ -1,6 +1,6 @@ import { isObject, isString, isFiniteNumber, isBoolean } from '../lang'; import { objectAssign } from '../lang/objectAssign'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { ERROR_NOT_PLAIN_OBJECT, ERROR_SIZE_EXCEEDED, WARN_SETTING_NULL, WARN_TRIMMING_PROPERTIES } from '../../logger/constants'; diff --git a/src/utils/inputValidation/key.ts b/src/utils/inputValidation/key.ts index 9068bed6..3a612a59 100644 --- a/src/utils/inputValidation/key.ts +++ b/src/utils/inputValidation/key.ts @@ -1,5 +1,5 @@ import { isObject, isString, isFiniteNumber, toString } from '../lang'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from 
'../../logger/types'; import { ERROR_NULL, WARN_CONVERTING, ERROR_EMPTY, ERROR_TOO_LONG, ERROR_INVALID, ERROR_INVALID_KEY_OBJECT } from '../../logger/constants'; diff --git a/src/utils/inputValidation/preloadedData.ts b/src/utils/inputValidation/preloadedData.ts index 10531580..f07ee432 100644 --- a/src/utils/inputValidation/preloadedData.ts +++ b/src/utils/inputValidation/preloadedData.ts @@ -1,6 +1,5 @@ import { isObject, isString, isFiniteNumber } from '../lang'; import { validateSplit } from './split'; -import { SplitIO } from '../../types'; import { ILogger } from '../../logger/types'; function validateTimestampData(log: ILogger, maybeTimestamp: any, method: string, item: string) { @@ -42,7 +41,7 @@ function validateSegmentsData(log: ILogger, maybeSegmentsData: any, method: stri return false; } -export function validatePreloadedData(log: ILogger, maybePreloadedData: any, method: string): maybePreloadedData is SplitIO.PreloadedData { +export function validatePreloadedData(log: ILogger, maybePreloadedData: any, method: string) { if (!isObject(maybePreloadedData)) { log.error(`${method}: preloadedData must be an object.`); } else if ( diff --git a/src/utils/inputValidation/trafficTypeExistence.ts b/src/utils/inputValidation/trafficTypeExistence.ts index 9619f197..8040f849 100644 --- a/src/utils/inputValidation/trafficTypeExistence.ts +++ b/src/utils/inputValidation/trafficTypeExistence.ts @@ -2,7 +2,7 @@ import { thenable } from '../promise/thenable'; import { LOCALHOST_MODE } from '../constants'; import { ISplitsCacheBase } from '../../storages/types'; import { IReadinessManager } from '../../readiness/types'; -import { SDKMode } from '../../types'; +import SplitIO from '../../../types/splitio'; import { MaybeThenable } from '../../dtos/types'; import { ILogger } from '../../logger/types'; import { WARN_NOT_EXISTENT_TT } from '../../logger/constants'; @@ -14,7 +14,7 @@ function logTTExistenceWarning(log: ILogger, maybeTT: string, method: string) { /** * Separated from the previous method since on some cases it'll be async. 
*/ -export function validateTrafficTypeExistence(log: ILogger, readinessManager: IReadinessManager, splitsCache: ISplitsCacheBase, mode: SDKMode, maybeTT: string, method: string): MaybeThenable { +export function validateTrafficTypeExistence(log: ILogger, readinessManager: IReadinessManager, splitsCache: ISplitsCacheBase, mode: SplitIO.SDKMode, maybeTT: string, method: string): MaybeThenable { // If not ready or in localhost mode, we won't run the validation if (!readinessManager.isReady() || mode === LOCALHOST_MODE) return true; diff --git a/src/utils/key/index.ts b/src/utils/key/index.ts index aa252beb..fc763b6e 100644 --- a/src/utils/key/index.ts +++ b/src/utils/key/index.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { isObject } from '../lang'; // function isSplitKeyObject(key: any): key is SplitIO.SplitKeyObject { diff --git a/src/utils/settingsValidation/consent.ts b/src/utils/settingsValidation/consent.ts index 98b4112b..8c706b75 100644 --- a/src/utils/settingsValidation/consent.ts +++ b/src/utils/settingsValidation/consent.ts @@ -1,12 +1,12 @@ import { ERROR_INVALID_CONFIG_PARAM } from '../../logger/constants'; import { ILogger } from '../../logger/types'; -import { ConsentStatus } from '../../types'; +import SplitIO from '../../../types/splitio'; import { CONSENT_DECLINED, CONSENT_GRANTED, CONSENT_UNKNOWN } from '../constants'; import { stringToUpperCase } from '../lang'; const userConsentValues = [CONSENT_DECLINED, CONSENT_GRANTED, CONSENT_UNKNOWN]; -export function validateConsent({ userConsent, log }: { userConsent?: any, log: ILogger }): ConsentStatus { +export function validateConsent({ userConsent, log }: { userConsent?: any, log: ILogger }): SplitIO.ConsentStatus { userConsent = stringToUpperCase(userConsent); if (userConsentValues.indexOf(userConsent) > -1) return userConsent; diff --git a/src/utils/settingsValidation/impressionsMode.ts b/src/utils/settingsValidation/impressionsMode.ts index 98e23d95..a273161a 100644 --- a/src/utils/settingsValidation/impressionsMode.ts +++ b/src/utils/settingsValidation/impressionsMode.ts @@ -1,6 +1,6 @@ import { ERROR_INVALID_CONFIG_PARAM } from '../../logger/constants'; import { ILogger } from '../../logger/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { DEBUG, OPTIMIZED, NONE } from '../constants'; import { stringToUpperCase } from '../lang'; diff --git a/src/utils/settingsValidation/logger/builtinLogger.ts b/src/utils/settingsValidation/logger/builtinLogger.ts index abc4f56d..d64f32a3 100644 --- a/src/utils/settingsValidation/logger/builtinLogger.ts +++ b/src/utils/settingsValidation/logger/builtinLogger.ts @@ -4,7 +4,7 @@ import { isLocalStorageAvailable } from '../../env/isLocalStorageAvailable'; import { isNode } from '../../env/isNode'; import { codesDebug } from '../../../logger/messages/debug'; import { getLogLevel } from './commons'; -import { LogLevel } from '../../../types'; +import SplitIO from '../../../../types/splitio'; const allCodes = new Map(codesDebug); @@ -43,7 +43,7 @@ if (/^(enabled?|on)/i.test(initialState)) { export function validateLogger(settings: { debug: unknown }): ILogger { const { debug } = settings; - const logLevel: LogLevel | undefined = debug !== undefined ? getLogLevel(debug) : initialLogLevel; + const logLevel: SplitIO.LogLevel | undefined = debug !== undefined ? 
getLogLevel(debug) : initialLogLevel; const log = new Logger({ logLevel: logLevel || initialLogLevel }, allCodes); diff --git a/src/utils/settingsValidation/logger/commons.ts b/src/utils/settingsValidation/logger/commons.ts index a51ba991..3958baaf 100644 --- a/src/utils/settingsValidation/logger/commons.ts +++ b/src/utils/settingsValidation/logger/commons.ts @@ -1,6 +1,6 @@ import { LogLevels, isLogLevelString } from '../../../logger'; -import { LogLevel } from '../../../types'; +import SplitIO from '../../../../types/splitio'; /** * Returns the LogLevel for the given debugValue or undefined if it is invalid, @@ -9,7 +9,7 @@ import { LogLevel } from '../../../types'; * @param debugValue debug value at config * @returns LogLevel of the given debugValue or undefined if the provided value is invalid */ -export function getLogLevel(debugValue: unknown): LogLevel | undefined { +export function getLogLevel(debugValue: unknown): SplitIO.LogLevel | undefined { if (typeof debugValue === 'boolean') { if (debugValue) { return LogLevels.DEBUG; diff --git a/src/utils/settingsValidation/logger/pluggableLogger.ts b/src/utils/settingsValidation/logger/pluggableLogger.ts index 633d523a..3b872877 100644 --- a/src/utils/settingsValidation/logger/pluggableLogger.ts +++ b/src/utils/settingsValidation/logger/pluggableLogger.ts @@ -1,6 +1,6 @@ import { Logger, LogLevels } from '../../../logger'; import { ILogger } from '../../../logger/types'; -import { LogLevel } from '../../../types'; +import SplitIO from '../../../../types/splitio'; import { getLogLevel } from './commons'; function isLogger(log: any): log is ILogger { @@ -19,7 +19,7 @@ let initialLogLevel = LogLevels.NONE; */ export function validateLogger(settings: { debug: unknown }): ILogger { const { debug } = settings; - let logLevel: LogLevel | undefined = initialLogLevel; + let logLevel: SplitIO.LogLevel | undefined = initialLogLevel; if (debug !== undefined) { if (isLogger(debug)) return debug; diff --git a/src/utils/settingsValidation/splitFilters.ts b/src/utils/settingsValidation/splitFilters.ts index db9207df..21474322 100644 --- a/src/utils/settingsValidation/splitFilters.ts +++ b/src/utils/settingsValidation/splitFilters.ts @@ -1,6 +1,6 @@ import { validateSplits } from '../inputValidation/splits'; import { ISplitFiltersValidation } from '../../dtos/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { WARN_SPLITS_FILTER_IGNORED, WARN_SPLITS_FILTER_EMPTY, WARN_SPLITS_FILTER_INVALID, SETTINGS_SPLITS_FILTER, LOG_PREFIX_SETTINGS, ERROR_SETS_FILTER_EXCLUSIVE, WARN_LOWERCASE_FLAGSET, WARN_INVALID_FLAGSET, WARN_FLAGSET_NOT_CONFIGURED } from '../../logger/constants'; import { objectAssign } from '../lang/objectAssign'; diff --git a/src/utils/settingsValidation/storage/storageCS.ts b/src/utils/settingsValidation/storage/storageCS.ts index 097ce95d..f1e49204 100644 --- a/src/utils/settingsValidation/storage/storageCS.ts +++ b/src/utils/settingsValidation/storage/storageCS.ts @@ -1,5 +1,6 @@ import { InMemoryStorageCSFactory } from '../../../storages/inMemory/InMemoryStorageCS'; -import { ISettings, SDKMode } from '../../../types'; +import { ISettings } from '../../../types'; +import SplitIO from '../../../../types/splitio'; import { ILogger } from '../../../logger/types'; import { ERROR_STORAGE_INVALID } from '../../../logger/constants'; import { LOCALHOST_MODE, STANDALONE_MODE, STORAGE_PLUGGABLE, STORAGE_LOCALSTORAGE, STORAGE_MEMORY } from 
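The hunks above repeat a single migration pattern: modules stop importing individual type aliases (SplitIO, SDKMode, LogLevel, StorageType, ConsentStatus) from src/types and instead default-import the namespace declared in types/splitio.d.ts, qualifying each type through it. A minimal sketch of a module following the new convention (the relative path depth depends on the module's location, as in the hunks above):

import SplitIO from '../../../types/splitio';

// Types are referenced through the namespace rather than via named imports
export function isConsumerMode(mode: SplitIO.SDKMode): boolean {
  return mode === 'consumer' || mode === 'consumer_partial';
}

const storageType: SplitIO.StorageType = 'MEMORY';
const logLevel: SplitIO.LogLevel = 'WARN';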
'../../../utils/constants'; @@ -21,7 +22,7 @@ __InLocalStorageMockFactory.type = STORAGE_MEMORY; * * @throws error if mode is consumer and the provided storage is not compatible */ -export function validateStorageCS(settings: { log: ILogger, storage?: any, mode: SDKMode }): ISettings['storage'] { +export function validateStorageCS(settings: { log: ILogger, storage?: any, mode: SplitIO.SDKMode }): ISettings['storage'] { let { storage = InMemoryStorageCSFactory, log, mode } = settings; // If an invalid storage is provided, fallback into MEMORY diff --git a/types/splitio.d.ts b/types/splitio.d.ts index de3db1ee..46da9a6c 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -7,270 +7,271 @@ import { RequestOptions } from 'http'; export as namespace SplitIO; export = SplitIO; +/****** Exposed namespace ******/ /** - * EventEmitter interface based on a subset of the NodeJS.EventEmitter methods. - */ -interface IEventEmitter { - addListener(event: string, listener: (...args: any[]) => void): this; - on(event: string, listener: (...args: any[]) => void): this; - once(event: string, listener: (...args: any[]) => void): this; - removeListener(event: string, listener: (...args: any[]) => void): this; - off(event: string, listener: (...args: any[]) => void): this; - removeAllListeners(event?: string): this; - emit(event: string, ...args: any[]): boolean; -} -/** - * NodeJS.EventEmitter interface - * @see {@link https://nodejs.org/api/events.html} - */ -interface EventEmitter extends IEventEmitter { - addListener(event: string | symbol, listener: (...args: any[]) => void): this; - on(event: string | symbol, listener: (...args: any[]) => void): this; - once(event: string | symbol, listener: (...args: any[]) => void): this; - removeListener(event: string | symbol, listener: (...args: any[]) => void): this; - off(event: string | symbol, listener: (...args: any[]) => void): this; - removeAllListeners(event?: string | symbol): this; - emit(event: string | symbol, ...args: any[]): boolean; - setMaxListeners(n: number): this; - getMaxListeners(): number; - listeners(event: string | symbol): Function[]; - rawListeners(event: string | symbol): Function[]; - listenerCount(type: string | symbol): number; - // Added in Node 6... - prependListener(event: string | symbol, listener: (...args: any[]) => void): this; - prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; - eventNames(): Array; -} -/** - * @typedef {Object} EventConsts - * @property {string} SDK_READY The ready event. - * @property {string} SDK_READY_FROM_CACHE The ready event when fired with cached data. - * @property {string} SDK_READY_TIMED_OUT The timeout event. - * @property {string} SDK_UPDATE The update event. - */ -type EventConsts = { - SDK_READY: 'init::ready'; - SDK_READY_FROM_CACHE: 'init::cache-ready'; - SDK_READY_TIMED_OUT: 'init::timeout'; - SDK_UPDATE: 'state::update'; -}; -/** - * SDK Modes. - * @typedef {string} SDKMode - */ -type SDKMode = 'standalone' | 'localhost' | 'consumer' | 'consumer_partial'; -/** - * Storage types. - * @typedef {string} StorageType - */ -type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; -/** - * Settings interface. This is a representation of the settings the SDK expose, that's why - * most of it's props are readonly. Only features should be rewritten when localhost mode is active. 
- * @interface ISettings - */ -interface ISettings { - readonly core: { - authorizationKey: string; - key: SplitIO.SplitKey; - labelsEnabled: boolean; - IPAddressesEnabled: boolean; - }; - readonly mode: SDKMode; - readonly scheduler: { - featuresRefreshRate: number; - impressionsRefreshRate: number; - impressionsQueueSize: number; - /** - * @deprecated - */ - metricsRefreshRate?: number; - telemetryRefreshRate: number; - segmentsRefreshRate: number; - offlineRefreshRate: number; - eventsPushRate: number; - eventsQueueSize: number; - pushRetryBackoffBase: number; - }; - readonly startup: { - readyTimeout: number; - requestTimeoutBeforeReady: number; - retriesOnFailureBeforeReady: number; - eventsFirstPushWindow: number; - }; - readonly storage: SplitIO.StorageSyncFactory | SplitIO.StorageAsyncFactory | SplitIO.StorageOptions; - readonly urls: { - events: string; - sdk: string; - auth: string; - streaming: string; - telemetry: string; - }; - readonly integrations?: SplitIO.IntegrationFactory[]; - readonly debug: boolean | LogLevel | SplitIO.ILogger; - readonly version: string; - /** - * Mocked features map if using in client-side, or mocked features file path string if using in server-side (NodeJS). - */ - features: SplitIO.MockedFeaturesMap | SplitIO.MockedFeaturesFilePath; - readonly streamingEnabled: boolean; - readonly sync: { - splitFilters: SplitIO.SplitFilter[]; - impressionsMode: SplitIO.ImpressionsMode; - enabled: boolean; - flagSpecVersion: string; - requestOptions?: { - getHeaderOverrides?: (context: { headers: Record }) => Record; - }; - }; - readonly impressionListener?: SplitIO.IImpressionListener; - /** - * User consent status if using in client-side. Undefined if using in server-side (NodeJS). - */ - readonly userConsent?: SplitIO.ConsentStatus; -} -/** - * Log levels. - * @typedef {string} LogLevel - */ -type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'; -/** - * Logger API - * @interface ILoggerAPI + * Shared types and interfaces for `@splitsoftware` packages, to support integrating JavaScript SDKs with TypeScript. */ -interface ILoggerAPI { - /** - * Enables SDK logging to the console. - * @function enable - * @returns {void} - */ - enable(): void; +declare namespace SplitIO { + /** - * Disables SDK logging. - * @function disable - * @returns {void} + * EventEmitter interface based on a subset of the NodeJS.EventEmitter methods. */ - disable(): void; + interface IEventEmitter { + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; + off(event: string, listener: (...args: any[]) => void): this; + removeAllListeners(event?: string): this; + emit(event: string, ...args: any[]): boolean; + } /** - * Sets a log level for the SDK logs. 
- * @function setLogLevel - * @returns {void} - */ - setLogLevel(logLevel: LogLevel): void; + * NodeJS.EventEmitter interface + * @see {@link https://nodejs.org/api/events.html} + */ + interface EventEmitter extends IEventEmitter { + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + off(event: string | symbol, listener: (...args: any[]) => void): this; + removeAllListeners(event?: string | symbol): this; + emit(event: string | symbol, ...args: any[]): boolean; + setMaxListeners(n: number): this; + getMaxListeners(): number; + listeners(event: string | symbol): Function[]; + rawListeners(event: string | symbol): Function[]; + listenerCount(type: string | symbol): number; + // Added in Node 6... + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + eventNames(): Array; + } /** - * Log level constants. Use this to pass them to setLogLevel function. - */ - LogLevel: { - [level in LogLevel]: LogLevel; + * @typedef {Object} EventConsts + * @property {string} SDK_READY The ready event. + * @property {string} SDK_READY_FROM_CACHE The ready event when fired with cached data. + * @property {string} SDK_READY_TIMED_OUT The timeout event. + * @property {string} SDK_UPDATE The update event. + */ + type EventConsts = { + SDK_READY: 'init::ready'; + SDK_READY_FROM_CACHE: 'init::cache-ready'; + SDK_READY_TIMED_OUT: 'init::timeout'; + SDK_UPDATE: 'state::update'; }; -} -/** - * User consent API - * @interface IUserConsentAPI - */ -interface IUserConsentAPI { /** - * Sets or updates the user consent status. Possible values are `true` and `false`, which represent user consent `'GRANTED'` and `'DECLINED'` respectively. - * - `true ('GRANTED')`: the user has granted consent for tracking events and impressions. The SDK will send them to Split cloud. - * - `false ('DECLINED')`: the user has declined consent for tracking events and impressions. The SDK will not send them to Split cloud. - * - * NOTE: calling this method updates the user consent at a factory level, affecting all clients of the same factory. - * - * @function setStatus - * @param {boolean} userConsent The user consent status, true for 'GRANTED' and false for 'DECLINED'. - * @returns {boolean} Whether the provided param is a valid value (i.e., a boolean value) or not. + * SDK Modes. + * @typedef {string} SDKMode */ - setStatus(userConsent: boolean): boolean; + type SDKMode = 'standalone' | 'localhost' | 'consumer' | 'consumer_partial'; /** - * Gets the user consent status. - * - * @function getStatus - * @returns {ConsentStatus} The user consent status. + * Storage types. + * @typedef {string} StorageType */ - getStatus(): SplitIO.ConsentStatus; + type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; /** - * Consent status constants. Use this to compare with the getStatus function result. + * Settings interface. This is a representation of the settings the SDK expose, that's why + * most of it's props are readonly. Only features should be rewritten when localhost mode is active. + * @interface ISettings */ - Status: { - [status in SplitIO.ConsentStatus]: SplitIO.ConsentStatus; - }; -} -/** - * Common API for entities that expose status handlers. 
- * @interface IStatusInterface - * @extends EventEmitter - */ -interface IStatusInterface extends EventEmitter { + interface ISettings { + readonly core: { + authorizationKey: string; + key: SplitIO.SplitKey; + labelsEnabled: boolean; + IPAddressesEnabled: boolean; + }; + readonly mode: SDKMode; + readonly scheduler: { + featuresRefreshRate: number; + impressionsRefreshRate: number; + impressionsQueueSize: number; + /** + * @deprecated + */ + metricsRefreshRate?: number; + telemetryRefreshRate: number; + segmentsRefreshRate: number; + offlineRefreshRate: number; + eventsPushRate: number; + eventsQueueSize: number; + pushRetryBackoffBase: number; + }; + readonly startup: { + readyTimeout: number; + requestTimeoutBeforeReady: number; + retriesOnFailureBeforeReady: number; + eventsFirstPushWindow: number; + }; + readonly storage: SplitIO.StorageSyncFactory | SplitIO.StorageAsyncFactory | SplitIO.StorageOptions; + readonly urls: { + events: string; + sdk: string; + auth: string; + streaming: string; + telemetry: string; + }; + readonly integrations?: SplitIO.IntegrationFactory[]; + readonly debug: boolean | LogLevel | SplitIO.ILogger; + readonly version: string; + /** + * Mocked features map if using in client-side, or mocked features file path string if using in server-side (NodeJS). + */ + features: SplitIO.MockedFeaturesMap | SplitIO.MockedFeaturesFilePath; + readonly streamingEnabled: boolean; + readonly sync: { + splitFilters: SplitIO.SplitFilter[]; + impressionsMode: SplitIO.ImpressionsMode; + enabled: boolean; + flagSpecVersion: string; + requestOptions?: { + getHeaderOverrides?: (context: { headers: Record }) => Record; + }; + }; + readonly impressionListener?: SplitIO.IImpressionListener; + /** + * User consent status if using in client-side. Undefined if using in server-side (NodeJS). + */ + readonly userConsent?: SplitIO.ConsentStatus; + } /** - * Constant object containing the SDK events for you to use. - * @property {EventConsts} Event + * Log levels. + * @typedef {string} LogLevel */ - Event: EventConsts; + type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'; /** - * Returns a promise that resolves once the SDK has finished loading (`SDK_READY` event emitted) or rejected if the SDK has timedout (`SDK_READY_TIMED_OUT` event emitted). - * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, the `ready` method will return a resolved promise once the SDK is ready. - * - * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises. - * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example: - * ``` - * try { - * await client.ready().catch((e) => { throw e; }); - * // SDK is ready - * } catch(e) { - * // SDK has timedout - * } - * ``` - * - * @function ready - * @returns {Promise} + * Logger API + * @interface ILoggerAPI */ - ready(): Promise; -} -/** - * Common definitions between clients for different environments interface. - * @interface IBasicClient - * @extends IStatusInterface - */ -interface IBasicClient extends IStatusInterface { + interface ILoggerAPI { + /** + * Enables SDK logging to the console. + * @function enable + * @returns {void} + */ + enable(): void; + /** + * Disables SDK logging. + * @function disable + * @returns {void} + */ + disable(): void; + /** + * Sets a log level for the SDK logs. 
+ * @function setLogLevel + * @returns {void} + */ + setLogLevel(logLevel: LogLevel): void; + /** + * Log level constants. Use this to pass them to setLogLevel function. + */ + LogLevel: { + [level in LogLevel]: LogLevel; + }; + } /** - * Destroys the client instance. - * - * In 'standalone' and 'partial consumer' modes, this method will flush any pending impressions and events. - * In 'standalone' mode, it also stops the synchronization of feature flag definitions with the backend. - * In 'consumer' and 'partial consumer' modes, this method also disconnects the SDK from the Pluggable storage. - * - * @function destroy - * @returns {Promise} A promise that resolves once the client is destroyed. + * User consent API + * @interface IUserConsentAPI */ - destroy(): Promise; -} -/** - * Common definitions between SDK instances for different environments interface. - * @interface IBasicSDK - */ -interface IBasicSDK { + interface IUserConsentAPI { + /** + * Sets or updates the user consent status. Possible values are `true` and `false`, which represent user consent `'GRANTED'` and `'DECLINED'` respectively. + * - `true ('GRANTED')`: the user has granted consent for tracking events and impressions. The SDK will send them to Split cloud. + * - `false ('DECLINED')`: the user has declined consent for tracking events and impressions. The SDK will not send them to Split cloud. + * + * NOTE: calling this method updates the user consent at a factory level, affecting all clients of the same factory. + * + * @function setStatus + * @param {boolean} userConsent The user consent status, true for 'GRANTED' and false for 'DECLINED'. + * @returns {boolean} Whether the provided param is a valid value (i.e., a boolean value) or not. + */ + setStatus(userConsent: boolean): boolean; + /** + * Gets the user consent status. + * + * @function getStatus + * @returns {ConsentStatus} The user consent status. + */ + getStatus(): SplitIO.ConsentStatus; + /** + * Consent status constants. Use this to compare with the getStatus function result. + */ + Status: { + [status in SplitIO.ConsentStatus]: SplitIO.ConsentStatus; + }; + } /** - * Current settings of the SDK instance. - * @property settings + * Common API for entities that expose status handlers. + * @interface IStatusInterface + * @extends EventEmitter */ - settings: ISettings; + interface IStatusInterface extends EventEmitter { + /** + * Constant object containing the SDK events for you to use. + * @property {EventConsts} Event + */ + Event: EventConsts; + /** + * Returns a promise that resolves once the SDK has finished loading (`SDK_READY` event emitted) or rejected if the SDK has timedout (`SDK_READY_TIMED_OUT` event emitted). + * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, the `ready` method will return a resolved promise once the SDK is ready. + * + * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises. + * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example: + * ``` + * try { + * await client.ready().catch((e) => { throw e; }); + * // SDK is ready + * } catch(e) { + * // SDK has timedout + * } + * ``` + * + * @function ready + * @returns {Promise} + */ + ready(): Promise; + } /** - * Logger API. 
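For illustration, a minimal usage sketch of the status API being introduced above: the event constants come from `EventConsts` and the promise pattern follows the `ready()` documentation. The `client` argument is assumed to be any object implementing `SplitIO.IBasicClient`; nothing here is taken from SDK code outside this declaration file.

```
// Sketch only: reacting to SDK status through the Event constants or the ready() promise.
function waitForSdk(client: SplitIO.IBasicClient): Promise<void> {
  client.once(client.Event.SDK_READY, () => console.log('SDK is ready'));
  client.once(client.Event.SDK_READY_TIMED_OUT, () => console.warn('SDK timed out; it may still become ready later'));

  // Promise-based alternative, following the catch pattern documented for ready():
  return client.ready().catch(() => { /* timed out; handling the rejection avoids unhandled rejections with async/await */ });
}
```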
- * @property Logger + * Common definitions between clients for different environments interface. + * @interface IBasicClient + * @extends IStatusInterface */ - Logger: ILoggerAPI; + interface IBasicClient extends IStatusInterface { + /** + * Destroys the client instance. + * + * In 'standalone' and 'partial consumer' modes, this method will flush any pending impressions and events. + * In 'standalone' mode, it also stops the synchronization of feature flag definitions with the backend. + * In 'consumer' and 'partial consumer' modes, this method also disconnects the SDK from the Pluggable storage. + * + * @function destroy + * @returns {Promise} A promise that resolves once the client is destroyed. + */ + destroy(): Promise; + } /** - * Destroys all the clients created by this factory. - * @function destroy - * @returns {Promise} + * Common definitions between SDK instances for different environments interface. + * @interface IBasicSDK */ - destroy(): Promise; -} -/****** Exposed namespace ******/ -/** - * Shared types and interfaces for `@splitsoftware` packages for usage when integrating JavaScript SDKs with TypeScript. - */ -declare namespace SplitIO { + interface IBasicSDK { + /** + * Current settings of the SDK instance. + * @property settings + */ + settings: ISettings; + /** + * Logger API. + * @property Logger + */ + Logger: ILoggerAPI; + /** + * Destroys all the clients created by this factory. + * @function destroy + * @returns {Promise} + */ + destroy(): Promise; + } /** * Feature flag treatment value, returned by getTreatment. * @typedef {string} Treatment @@ -380,25 +381,30 @@ declare namespace SplitIO { type MockedFeaturesMap = { [featureName: string]: string | TreatmentWithConfig; }; + /** + * Impression DTO generated by the SDK when processing evaluations. + * @typedef {Object} ImpressionDTO + */ + type ImpressionDTO = { + feature: string; + keyName: string; + treatment: string; + time: number; + bucketingKey?: string; + label: string; + changeNumber: number; + pt?: number; + } /** * Object with information about an impression. It contains the generated impression DTO as well as * complementary information around where and how it was generated in that way. * @typedef {Object} ImpressionData */ type ImpressionData = { - impression: { - feature: string; - keyName: string; - treatment: string; - time: number; - bucketingKey?: string; - label: string; - changeNumber: number; - pt?: number; - }; + impression: ImpressionDTO; attributes?: SplitIO.Attributes; - ip: string; - hostname: string; + ip: string | false; + hostname: string | false; sdkLanguageVersion: string; }; /** @@ -451,10 +457,10 @@ declare namespace SplitIO { defaultTreatment: string; }; /** - * A promise that resolves to a feature flag view. - * @typedef {Promise} SplitView + * A promise that resolves to a feature flag view or null if the feature flag is not found. + * @typedef {Promise} SplitViewAsync */ - type SplitViewAsync = Promise; + type SplitViewAsync = Promise; /** * An array containing the SplitIO.SplitView elements. */ @@ -478,15 +484,14 @@ declare namespace SplitIO { * Storage for synchronous (standalone) SDK. * Its interface details are not part of the public API. */ - type StorageSync = {}; + type StorageSync = any; /** * Storage builder for synchronous (standalone) SDK. - * By returning undefined, the SDK will use the default IN MEMORY storage. * Input parameter details are not part of the public API. 
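As a sketch of the `ImpressionData` shape added above (it assumes `logImpression` receives a `SplitIO.ImpressionData` argument, which is how the impression listener is described later in this file; the log format is illustrative):

```
// Sketch: an impression listener consuming SplitIO.ImpressionData.
// Note that `ip` and `hostname` may be `false` when IP address collection is disabled.
const impressionListener: SplitIO.IImpressionListener = {
  logImpression(data: SplitIO.ImpressionData) {
    const { impression, ip, hostname, sdkLanguageVersion } = data;
    console.log(
      `${impression.feature} evaluated to "${impression.treatment}" for key "${impression.keyName}"`,
      `(label: ${impression.label}, changeNumber: ${impression.changeNumber},`,
      `origin: ${ip || 'unknown'}/${hostname || 'unknown'}, sdk: ${sdkLanguageVersion})`
    );
  }
};
```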
*/ type StorageSyncFactory = { readonly type: StorageType; - (params: {}): (StorageSync | undefined); + (params: any): (StorageSync | undefined); } /** * Configuration params for `InLocalStorage` @@ -503,14 +508,14 @@ declare namespace SplitIO { * Storage for asynchronous (consumer) SDK. * Its interface details are not part of the public API. */ - type StorageAsync = {} + type StorageAsync = any /** * Storage builder for asynchronous (consumer) SDK. * Input parameter details are not part of the public API. */ type StorageAsyncFactory = { - readonly type: 'PLUGGABLE'; - (params: {}): StorageAsync; + readonly type: StorageType; + (params: any): StorageAsync; } /** * Configuration params for `PluggableStorage` @@ -564,7 +569,7 @@ declare namespace SplitIO { * SDK integration instance. * Its interface details are not part of the public API. */ - type Integration = {}; + type Integration = any; /** * SDK integration factory. * By returning an integration, the SDK will queue events and impressions into it. @@ -572,7 +577,7 @@ declare namespace SplitIO { */ type IntegrationFactory = { readonly type: string; - (params: {}): (Integration | void); + (params: any): (Integration | void); } /** * A pair of user key and it's trafficType, required for tracking valid Split events. @@ -594,7 +599,7 @@ declare namespace SplitIO { properties?: Properties; trafficTypeName?: string; key?: string; - timestamp?: number; + timestamp: number; }; /** * Object representing the data sent by Split (events and impressions). From 0ff9901064dc27a79e17f3a8fdcae6356390b67e Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 28 Oct 2024 21:41:48 -0300 Subject: [PATCH 131/146] Fixed some types --- types/splitio.d.ts | 54 +++++++++++++++++++++++++--------------------- 1 file changed, 29 insertions(+), 25 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index de3db1ee..76109338 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -268,7 +268,7 @@ interface IBasicSDK { } /****** Exposed namespace ******/ /** - * Shared types and interfaces for `@splitsoftware` packages for usage when integrating JavaScript SDKs with TypeScript. + * Shared types and interfaces for `@splitsoftware` packages, to support integrating JavaScript SDKs with TypeScript. */ declare namespace SplitIO { /** @@ -380,25 +380,30 @@ declare namespace SplitIO { type MockedFeaturesMap = { [featureName: string]: string | TreatmentWithConfig; }; + /** + * Impression DTO generated by the SDK when processing evaluations. + * @typedef {Object} ImpressionDTO + */ + type ImpressionDTO = { + feature: string; + keyName: string; + treatment: string; + time: number; + bucketingKey?: string; + label: string; + changeNumber: number; + pt?: number; + } /** * Object with information about an impression. It contains the generated impression DTO as well as * complementary information around where and how it was generated in that way. * @typedef {Object} ImpressionData */ type ImpressionData = { - impression: { - feature: string; - keyName: string; - treatment: string; - time: number; - bucketingKey?: string; - label: string; - changeNumber: number; - pt?: number; - }; + impression: ImpressionDTO; attributes?: SplitIO.Attributes; - ip: string; - hostname: string; + ip: string | false; + hostname: string | false; sdkLanguageVersion: string; }; /** @@ -451,10 +456,10 @@ declare namespace SplitIO { defaultTreatment: string; }; /** - * A promise that resolves to a feature flag view. 
- * @typedef {Promise} SplitView + * A promise that resolves to a feature flag view or null if the feature flag is not found. + * @typedef {Promise} SplitViewAsync */ - type SplitViewAsync = Promise; + type SplitViewAsync = Promise; /** * An array containing the SplitIO.SplitView elements. */ @@ -478,15 +483,14 @@ declare namespace SplitIO { * Storage for synchronous (standalone) SDK. * Its interface details are not part of the public API. */ - type StorageSync = {}; + type StorageSync = any; /** * Storage builder for synchronous (standalone) SDK. - * By returning undefined, the SDK will use the default IN MEMORY storage. * Input parameter details are not part of the public API. */ type StorageSyncFactory = { readonly type: StorageType; - (params: {}): (StorageSync | undefined); + (params: any): (StorageSync | undefined); } /** * Configuration params for `InLocalStorage` @@ -503,14 +507,14 @@ declare namespace SplitIO { * Storage for asynchronous (consumer) SDK. * Its interface details are not part of the public API. */ - type StorageAsync = {} + type StorageAsync = any /** * Storage builder for asynchronous (consumer) SDK. * Input parameter details are not part of the public API. */ type StorageAsyncFactory = { - readonly type: 'PLUGGABLE'; - (params: {}): StorageAsync; + readonly type: StorageType; + (params: any): StorageAsync; } /** * Configuration params for `PluggableStorage` @@ -564,7 +568,7 @@ declare namespace SplitIO { * SDK integration instance. * Its interface details are not part of the public API. */ - type Integration = {}; + type Integration = any; /** * SDK integration factory. * By returning an integration, the SDK will queue events and impressions into it. @@ -572,7 +576,7 @@ declare namespace SplitIO { */ type IntegrationFactory = { readonly type: string; - (params: {}): (Integration | void); + (params: any): (Integration | void); } /** * A pair of user key and it's trafficType, required for tracking valid Split events. @@ -594,7 +598,7 @@ declare namespace SplitIO { properties?: Properties; trafficTypeName?: string; key?: string; - timestamp?: number; + timestamp: number; }; /** * Object representing the data sent by Split (events and impressions). From 2e5df35dd02f6fe46af41d0362a153dd5ea4b5bd Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 28 Oct 2024 23:37:50 -0300 Subject: [PATCH 132/146] Polishing --- types/splitio.d.ts | 243 +++++++++++++++++++++++---------------------- 1 file changed, 124 insertions(+), 119 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 76109338..2dd06839 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -489,7 +489,7 @@ declare namespace SplitIO { * Input parameter details are not part of the public API. */ type StorageSyncFactory = { - readonly type: StorageType; + readonly type: BrowserStorage; (params: any): (StorageSync | undefined); } /** @@ -513,7 +513,7 @@ declare namespace SplitIO { * Input parameter details are not part of the public API. */ type StorageAsyncFactory = { - readonly type: StorageType; + readonly type: PluggableAsyncStorage; (params: any): StorageAsync; } /** @@ -542,6 +542,11 @@ declare namespace SplitIO { * @typedef {string} NodeAsyncStorage */ type NodeAsyncStorage = 'REDIS'; + /** + * Asynchronous storages valid types for NodeJS. + * @typedef {string} NodeAsyncStorage + */ + type PluggableAsyncStorage = 'PLUGGABLE'; /** * Storage valid types for the browser. 
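Since `SplitViewAsync` now resolves to `SplitView | null`, consuming code should guard the null case. A sketch, where the `manager` parameter is a stand-in for any object whose `split()` method returns `SplitIO.SplitViewAsync`:

```
// Sketch: handling the nullable result of SplitViewAsync (a missing flag resolves to null).
async function logDefaultTreatment(manager: { split(name: string): SplitIO.SplitViewAsync }, flagName: string) {
  const view = await manager.split(flagName);
  if (view === null) {
    console.warn(`Feature flag "${flagName}" was not found`);
    return;
  }
  console.log(`Default treatment of "${flagName}" is "${view.defaultTreatment}"`);
}
```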
* @typedef {string} BrowserStorage @@ -1147,26 +1152,28 @@ declare namespace SplitIO { }; } /** - * Common settings interface for SDK instances on NodeJS. - * @interface INodeBasicSettings + * Settings interface for JavaScript SDK instances created on the browser, with client-side API and synchronous storage. + * + * @interface IBrowserSettings * @extends ISharedSettings + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} */ - interface INodeBasicSettings extends ISharedSettings { + interface IBrowserSettings extends ISharedSettings { /** - * SDK Startup settings for NodeJS. + * SDK Startup settings for the Browser. * @property {Object} startup */ startup?: { /** * Maximum amount of time used before notify a timeout. * @property {number} readyTimeout - * @default 15 + * @default 10 */ readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady - * @default 15 + * @default 5 */ requestTimeoutBeforeReady?: number; /** @@ -1180,7 +1187,7 @@ declare namespace SplitIO { * to better control on browsers. This number defines that window before the first events push. * * @property {number} eventsFirstPushWindow - * @default 0 + * @default 10 */ eventsFirstPushWindow?: number; }; @@ -1198,7 +1205,7 @@ declare namespace SplitIO { /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. * @property {number} impressionsRefreshRate - * @default 300 + * @default 60 */ impressionsRefreshRate?: number; /** @@ -1242,7 +1249,7 @@ declare namespace SplitIO { eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} * @property {number} offlineRefreshRate * @default 15 */ @@ -1256,7 +1263,7 @@ declare namespace SplitIO { pushRetryBackoffBase?: number; }; /** - * SDK Core settings for NodeJS. + * SDK Core settings for the browser. * @property {Object} core */ core: { @@ -1267,34 +1274,35 @@ declare namespace SplitIO { */ authorizationKey: string; /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true + * Customer identifier. Whatever this means to you. + * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @property {SplitKey} key */ - labelsEnabled?: boolean; + key: SplitKey; /** - * Disable machine IP and Name from being sent to Split backend. - * @property {boolean} IPAddressesEnabled + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * @property {boolean} labelsEnabled * @default true */ - IPAddressesEnabled?: boolean; + labelsEnabled?: boolean }; /** - * Defines which kind of storage we should instantiate. + * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. 
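To make the localhost-mode note above concrete, a configuration sketch using `IBrowserSettings` and a `MockedFeaturesMap` (flag names and treatment values are illustrative):

```
// Sketch: localhost mode for the browser; values in features can be a plain treatment
// string or a TreatmentWithConfig object.
const localhostConfig: SplitIO.IBrowserSettings = {
  core: {
    authorizationKey: 'localhost', // 'localhost' as the SDK key enables localhost mode
    key: 'user_id'
  },
  features: {
    reporting_v2: 'on',                                                   // plain treatment
    billing_updates: { treatment: 'visa', config: '{ "color": "blue" }' } // TreatmentWithConfig
  }
};
```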
+ * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + */ + features?: MockedFeaturesMap; + /** + * Defines which kind of storage we can instantiate on the browser. + * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. * @property {Object} storage */ storage?: { /** * Storage type to be instantiated by the SDK. - * @property {StorageType} type + * @property {BrowserStorage} type * @default 'MEMORY' */ - type?: StorageType; - /** - * Options to be passed to the selected storage. - * @property {Object} options - */ - options?: Object; + type?: BrowserStorage; /** * Optional prefix to prevent any kind of data collision between SDK versions. * @property {string} prefix @@ -1303,43 +1311,77 @@ declare namespace SplitIO { prefix?: string; }; /** - * The SDK mode. Possible values are "standalone", which is the default when using a synchronous storage, like 'MEMORY' and 'LOCALSTORAGE', - * and "consumer", which must be set when using an asynchronous storage, like 'REDIS'. For "localhost" mode, use "localhost" as authorizationKey. - * @property {SDKMode} mode - * @default 'standalone' + * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. + * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. + * @property {Object} urls */ - mode?: SDKMode; + urls?: UrlSettings; /** - * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {MockedFeaturesFilePath} features - * @default '$HOME/.split' + * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. + * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. + * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. + * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends + * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. + * + * @typedef {string} userConsent + * @default 'GRANTED' */ - features?: SplitIO.MockedFeaturesFilePath; + userConsent?: ConsentStatus; + sync?: ISharedSettings['sync'] & { + /** + * Custom options object for HTTP(S) requests in the Browser. + * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different case since headers are case-insensitive. 
Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, + * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. + * + * @property getHeaderOverrides + * @default undefined + * + * @param context - The context for the request. + * @param context.headers - The current headers in the request. + * @returns A set of headers to be merged with the current headers. + * + * @example + * const getHeaderOverrides = (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * }; + */ + getHeaderOverrides?: (context: { headers: Record }) => Record; + }; + }; } /** - * Settings interface for JavaScript SDK instances created on the browser, with client-side API and synchronous storage. - * - * @interface IBrowserSettings + * Common settings interface for SDK instances on NodeJS. + * @interface INodeBasicSettings * @extends ISharedSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} */ - interface IBrowserSettings extends ISharedSettings { + interface INodeBasicSettings extends ISharedSettings { /** - * SDK Startup settings for the Browser. + * SDK Startup settings for NodeJS. * @property {Object} startup */ startup?: { /** * Maximum amount of time used before notify a timeout. * @property {number} readyTimeout - * @default 10 + * @default 15 */ readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady - * @default 5 + * @default 15 */ requestTimeoutBeforeReady?: number; /** @@ -1353,7 +1395,7 @@ declare namespace SplitIO { * to better control on browsers. This number defines that window before the first events push. * * @property {number} eventsFirstPushWindow - * @default 10 + * @default 0 */ eventsFirstPushWindow?: number; }; @@ -1371,7 +1413,7 @@ declare namespace SplitIO { /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. * @property {number} impressionsRefreshRate - * @default 60 + * @default 300 */ impressionsRefreshRate?: number; /** @@ -1415,7 +1457,7 @@ declare namespace SplitIO { eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} * @property {number} offlineRefreshRate * @default 15 */ @@ -1429,7 +1471,7 @@ declare namespace SplitIO { pushRetryBackoffBase?: number; }; /** - * SDK Core settings for the browser. + * SDK Core settings for NodeJS. * @property {Object} core */ core: { @@ -1439,36 +1481,35 @@ declare namespace SplitIO { * @property {string} authorizationKey */ authorizationKey: string; - /** - * Customer identifier. Whatever this means to you. 
- * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @property {SplitKey} key - */ - key: SplitKey; /** * Disable labels from being sent to Split backend. Labels may contain sensitive information. * @property {boolean} labelsEnabled * @default true */ - labelsEnabled?: boolean + labelsEnabled?: boolean; + /** + * Disable machine IP and Name from being sent to Split backend. + * @property {boolean} IPAddressesEnabled + * @default true + */ + IPAddressesEnabled?: boolean; }; /** - * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} - */ - features?: MockedFeaturesMap; - /** - * Defines which kind of storage we can instantiate on the browser. - * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. + * Defines which kind of storage we should instantiate. * @property {Object} storage */ storage?: { /** * Storage type to be instantiated by the SDK. - * @property {BrowserStorage} type + * @property {StorageType} type * @default 'MEMORY' */ - type?: BrowserStorage; + type?: StorageType; + /** + * Options to be passed to the selected storage. + * @property {Object} options + */ + options?: Object; /** * Optional prefix to prevent any kind of data collision between SDK versions. * @property {string} prefix @@ -1477,55 +1518,19 @@ declare namespace SplitIO { prefix?: string; }; /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls + * The SDK mode. Possible values are "standalone", which is the default when using a synchronous storage, like 'MEMORY' and 'LOCALSTORAGE', + * and "consumer", which must be set when using an asynchronous storage, like 'REDIS'. For "localhost" mode, use "localhost" as authorizationKey. + * @property {SDKMode} mode + * @default 'standalone' */ - urls?: UrlSettings; + mode?: SDKMode; /** - * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. - * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. - * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. - * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends - * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. - * - * @typedef {string} userConsent - * @default 'GRANTED' + * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * @property {MockedFeaturesFilePath} features + * @default '$HOME/.split' */ - userConsent?: ConsentStatus; - sync?: ISharedSettings['sync'] & { - /** - * Custom options object for HTTP(S) requests in the Browser. - * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. 
- */ - requestOptions?: { - /** - * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. - * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. - * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` - * - * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, - * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. - * - * @property getHeaderOverrides - * @default undefined - * - * @param context - The context for the request. - * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. - * - * @example - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; - */ - getHeaderOverrides?: (context: { headers: Record }) => Record; - }; - }; + features?: SplitIO.MockedFeaturesFilePath; } /** * Settings interface for JavaScript SDK instances created on NodeJS, with server-side API and synchronous storage. @@ -1636,6 +1641,14 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ interface INodeAsyncSettings extends INodeBasicSettings { + /** + * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. + * + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#state-sharing-redis-integration} + * + * @property {'consumer'} mode + */ + mode: 'consumer'; /** * Defines which kind of async storage we can instantiate on NodeJS for 'consumer' mode. * The only possible storage type is 'REDIS'. @@ -1717,14 +1730,6 @@ declare namespace SplitIO { */ prefix?: string; }; - /** - * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. - * - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#state-sharing-redis-integration} - * - * @property {'consumer'} mode - */ - mode: 'consumer'; } /** * This represents the interface for the SDK instance with synchronous storage and client-side API, From e40a824f70227cc47228f938a4d4d6388a6648c3 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Mon, 28 Oct 2024 23:37:50 -0300 Subject: [PATCH 133/146] Polishing --- types/splitio.d.ts | 234 ++++++++++++++++++++++----------------------- 1 file changed, 117 insertions(+), 117 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 76109338..805f533f 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1147,26 +1147,28 @@ declare namespace SplitIO { }; } /** - * Common settings interface for SDK instances on NodeJS. - * @interface INodeBasicSettings + * Settings interface for JavaScript SDK instances created on the browser, with client-side API and synchronous storage. 
+ * + * @interface IBrowserSettings * @extends ISharedSettings + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} */ - interface INodeBasicSettings extends ISharedSettings { + interface IBrowserSettings extends ISharedSettings { /** - * SDK Startup settings for NodeJS. + * SDK Startup settings for the Browser. * @property {Object} startup */ startup?: { /** * Maximum amount of time used before notify a timeout. * @property {number} readyTimeout - * @default 15 + * @default 10 */ readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady - * @default 15 + * @default 5 */ requestTimeoutBeforeReady?: number; /** @@ -1180,7 +1182,7 @@ declare namespace SplitIO { * to better control on browsers. This number defines that window before the first events push. * * @property {number} eventsFirstPushWindow - * @default 0 + * @default 10 */ eventsFirstPushWindow?: number; }; @@ -1198,7 +1200,7 @@ declare namespace SplitIO { /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. * @property {number} impressionsRefreshRate - * @default 300 + * @default 60 */ impressionsRefreshRate?: number; /** @@ -1242,7 +1244,7 @@ declare namespace SplitIO { eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} * @property {number} offlineRefreshRate * @default 15 */ @@ -1256,7 +1258,7 @@ declare namespace SplitIO { pushRetryBackoffBase?: number; }; /** - * SDK Core settings for NodeJS. + * SDK Core settings for the browser. * @property {Object} core */ core: { @@ -1267,34 +1269,35 @@ declare namespace SplitIO { */ authorizationKey: string; /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true + * Customer identifier. Whatever this means to you. + * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @property {SplitKey} key */ - labelsEnabled?: boolean; + key: SplitKey; /** - * Disable machine IP and Name from being sent to Split backend. - * @property {boolean} IPAddressesEnabled + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * @property {boolean} labelsEnabled * @default true */ - IPAddressesEnabled?: boolean; + labelsEnabled?: boolean }; /** - * Defines which kind of storage we should instantiate. + * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + */ + features?: MockedFeaturesMap; + /** + * Defines which kind of storage we can instantiate on the browser. + * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. 
* @property {Object} storage */ storage?: { /** * Storage type to be instantiated by the SDK. - * @property {StorageType} type + * @property {BrowserStorage} type * @default 'MEMORY' */ - type?: StorageType; - /** - * Options to be passed to the selected storage. - * @property {Object} options - */ - options?: Object; + type?: BrowserStorage; /** * Optional prefix to prevent any kind of data collision between SDK versions. * @property {string} prefix @@ -1303,43 +1306,77 @@ declare namespace SplitIO { prefix?: string; }; /** - * The SDK mode. Possible values are "standalone", which is the default when using a synchronous storage, like 'MEMORY' and 'LOCALSTORAGE', - * and "consumer", which must be set when using an asynchronous storage, like 'REDIS'. For "localhost" mode, use "localhost" as authorizationKey. - * @property {SDKMode} mode - * @default 'standalone' + * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. + * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. + * @property {Object} urls */ - mode?: SDKMode; + urls?: UrlSettings; /** - * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {MockedFeaturesFilePath} features - * @default '$HOME/.split' + * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. + * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. + * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. + * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends + * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. + * + * @typedef {string} userConsent + * @default 'GRANTED' */ - features?: SplitIO.MockedFeaturesFilePath; + userConsent?: ConsentStatus; + sync?: ISharedSettings['sync'] & { + /** + * Custom options object for HTTP(S) requests in the Browser. + * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, + * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. + * + * @property getHeaderOverrides + * @default undefined + * + * @param context - The context for the request. 
+ * @param context.headers - The current headers in the request. + * @returns A set of headers to be merged with the current headers. + * + * @example + * const getHeaderOverrides = (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * }; + */ + getHeaderOverrides?: (context: { headers: Record }) => Record; + }; + }; } /** - * Settings interface for JavaScript SDK instances created on the browser, with client-side API and synchronous storage. - * - * @interface IBrowserSettings + * Common settings interface for SDK instances on NodeJS. + * @interface INodeBasicSettings * @extends ISharedSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} */ - interface IBrowserSettings extends ISharedSettings { + interface INodeBasicSettings extends ISharedSettings { /** - * SDK Startup settings for the Browser. + * SDK Startup settings for NodeJS. * @property {Object} startup */ startup?: { /** * Maximum amount of time used before notify a timeout. * @property {number} readyTimeout - * @default 10 + * @default 15 */ readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady - * @default 5 + * @default 15 */ requestTimeoutBeforeReady?: number; /** @@ -1353,7 +1390,7 @@ declare namespace SplitIO { * to better control on browsers. This number defines that window before the first events push. * * @property {number} eventsFirstPushWindow - * @default 10 + * @default 0 */ eventsFirstPushWindow?: number; }; @@ -1371,7 +1408,7 @@ declare namespace SplitIO { /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. * @property {number} impressionsRefreshRate - * @default 60 + * @default 300 */ impressionsRefreshRate?: number; /** @@ -1415,7 +1452,7 @@ declare namespace SplitIO { eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} * @property {number} offlineRefreshRate * @default 15 */ @@ -1429,7 +1466,7 @@ declare namespace SplitIO { pushRetryBackoffBase?: number; }; /** - * SDK Core settings for the browser. + * SDK Core settings for NodeJS. * @property {Object} core */ core: { @@ -1439,36 +1476,35 @@ declare namespace SplitIO { * @property {string} authorizationKey */ authorizationKey: string; - /** - * Customer identifier. Whatever this means to you. - * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @property {SplitKey} key - */ - key: SplitKey; /** * Disable labels from being sent to Split backend. Labels may contain sensitive information. * @property {boolean} labelsEnabled * @default true */ - labelsEnabled?: boolean + labelsEnabled?: boolean; + /** + * Disable machine IP and Name from being sent to Split backend. + * @property {boolean} IPAddressesEnabled + * @default true + */ + IPAddressesEnabled?: boolean; }; /** - * Mocked features map. 
For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} - */ - features?: MockedFeaturesMap; - /** - * Defines which kind of storage we can instantiate on the browser. - * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. + * Defines which kind of storage we should instantiate. * @property {Object} storage */ storage?: { /** * Storage type to be instantiated by the SDK. - * @property {BrowserStorage} type + * @property {StorageType} type * @default 'MEMORY' */ - type?: BrowserStorage; + type?: StorageType; + /** + * Options to be passed to the selected storage. + * @property {Object} options + */ + options?: Object; /** * Optional prefix to prevent any kind of data collision between SDK versions. * @property {string} prefix @@ -1477,55 +1513,19 @@ declare namespace SplitIO { prefix?: string; }; /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls + * The SDK mode. Possible values are "standalone", which is the default when using a synchronous storage, like 'MEMORY' and 'LOCALSTORAGE', + * and "consumer", which must be set when using an asynchronous storage, like 'REDIS'. For "localhost" mode, use "localhost" as authorizationKey. + * @property {SDKMode} mode + * @default 'standalone' */ - urls?: UrlSettings; + mode?: SDKMode; /** - * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. - * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. - * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. - * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends - * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. - * - * @typedef {string} userConsent - * @default 'GRANTED' + * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * @property {MockedFeaturesFilePath} features + * @default '$HOME/.split' */ - userConsent?: ConsentStatus; - sync?: ISharedSettings['sync'] & { - /** - * Custom options object for HTTP(S) requests in the Browser. - * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. - */ - requestOptions?: { - /** - * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. - * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. - * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. 
Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` - * - * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, - * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. - * - * @property getHeaderOverrides - * @default undefined - * - * @param context - The context for the request. - * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. - * - * @example - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; - */ - getHeaderOverrides?: (context: { headers: Record }) => Record; - }; - }; + features?: SplitIO.MockedFeaturesFilePath; } /** * Settings interface for JavaScript SDK instances created on NodeJS, with server-side API and synchronous storage. @@ -1636,6 +1636,14 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ interface INodeAsyncSettings extends INodeBasicSettings { + /** + * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. + * + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#state-sharing-redis-integration} + * + * @property {'consumer'} mode + */ + mode: 'consumer'; /** * Defines which kind of async storage we can instantiate on NodeJS for 'consumer' mode. * The only possible storage type is 'REDIS'. @@ -1717,14 +1725,6 @@ declare namespace SplitIO { */ prefix?: string; }; - /** - * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. - * - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#state-sharing-redis-integration} - * - * @property {'consumer'} mode - */ - mode: 'consumer'; } /** * This represents the interface for the SDK instance with synchronous storage and client-side API, From 1e64d1995e8aee8c7a381742b5e9b7632a5357e7 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 29 Oct 2024 11:16:40 -0300 Subject: [PATCH 134/146] Refactor Settings interfaces --- types/splitio.d.ts | 685 ++++++++++++++++----------------------------- 1 file changed, 235 insertions(+), 450 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 805f533f..bf68c4bc 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -123,6 +123,10 @@ interface ISettings { getHeaderOverrides?: (context: { headers: Record }) => Record; }; }; + readonly runtime: { + ip: string | false; + hostname: string | false; + }; readonly impressionListener?: SplitIO.IImpressionListener; /** * User consent status if using in client-side. Undefined if using in server-side (NodeJS). @@ -278,9 +282,9 @@ declare namespace SplitIO { type Treatment = string; /** * Feature flag treatment promise that resolves to actual treatment value. - * @typedef {Promise} AsyncTreatment + * @typedef {Promise} AsyncTreatment */ - type AsyncTreatment = Promise; + type AsyncTreatment = Promise; /** * An object with the treatments for a bulk of feature flags, returned by getTreatments. For example: * { @@ -308,7 +312,7 @@ declare namespace SplitIO { config: string | null; }; /** - * Feature flag treatment promise that resolves to actual treatment with config value. 
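A sketch tying together the consumer-mode settings above and the asynchronous treatment type: the Redis `url` option and the stand-in client shape are illustrative assumptions, not taken from this file.

```
// Sketch: NodeJS consumer mode backed by Redis, then awaiting an AsyncTreatment.
const consumerConfig: SplitIO.INodeAsyncSettings = {
  core: { authorizationKey: 'YOUR_SDK_KEY' },
  mode: 'consumer', // required when using the 'REDIS' storage type
  storage: {
    type: 'REDIS',
    options: { url: 'redis://localhost:6379/0' }, // illustrative connection options
    prefix: 'SPLITIO'
  }
};

// AsyncTreatment resolves to the plain treatment string.
async function isOn(client: { getTreatment(key: SplitIO.SplitKey, flagName: string): SplitIO.AsyncTreatment }) {
  const treatment = await client.getTreatment('user_id', 'reporting_v2');
  return treatment === 'on';
}
```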
+ * Feature flag treatment promise that resolves to actual SplitIO.TreatmentWithConfig object. * @typedef {Promise} AsyncTreatmentWithConfig */ type AsyncTreatmentWithConfig = Promise; @@ -685,50 +689,11 @@ declare namespace SplitIO { setLogLevel(logLevel: LogLevel): void; } /** - * Common settings interface for SDK instances created for client-side. + * Common settings properties. * - * @interface IClientSideBasicSettings + * @interface ISharedSettings */ - interface IClientSideBasicSettings { - /** - * SDK Core settings for client-side. - * @property {Object} core - */ - core: { - /** - * Your SDK key. - * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string; - /** - * Customer identifier. Whatever this means to you. - * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @property {SplitKey} key - */ - key: SplitKey; - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean; - }; - /** - * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. - * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging}. - * - * Examples: - * ``` - * config.debug = true - * config.debug = 'WARN' - * config.debug = ErrorLogger() - * ``` - * @property {boolean | LogLevel | ILogger} debug - * @default false - */ - debug?: boolean | LogLevel | SplitIO.ILogger; + interface ISharedSettings { /** * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, * which will check for the logImpression method. @@ -736,13 +701,6 @@ declare namespace SplitIO { * @default undefined */ impressionListener?: SplitIO.IImpressionListener; - /** - * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, - * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. - * @property {boolean} streamingEnabled - * @default true - */ - streamingEnabled?: boolean; /** * SDK synchronization settings. * @property {Object} sync @@ -758,7 +716,7 @@ declare namespace SplitIO { * ]` * @property {SplitIO.SplitFilter[]} splitFilters */ - splitFilters?: SplitIO.SplitFilter[] + splitFilters?: SplitIO.SplitFilter[]; /** * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. @@ -770,16 +728,6 @@ declare namespace SplitIO { * @default 'OPTIMIZED' */ impressionsMode?: SplitIO.ImpressionsMode; - /** - * Controls the SDK continuous synchronization flags. - * - * When `true` a running SDK will process rollout plan updates performed on the UI (default). - * When false it'll just fetch all data upon init. - * - * @property {boolean} enabled - * @default true - */ - enabled?: boolean /** * Custom options object for HTTP(S) requests. * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. 
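A minimal sketch of the shared `sync` options above; the filter values are illustrative and the `byName` filter shape follows the `splitFilters` example given in the comment:

```
// Sketch: shared sync options with a byName filter and an explicit impressions mode.
const syncOptions: SplitIO.ISharedSettings['sync'] = {
  splitFilters: [
    { type: 'byName', values: ['my_split_1', 'my_split_2'] }
  ],
  impressionsMode: 'DEBUG' // 'OPTIMIZED' is the default; 'DEBUG' sends every impression
};
```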
@@ -814,11 +762,72 @@ declare namespace SplitIO { }; }; /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. + * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone and partial consumer modes. * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. * @property {Object} urls */ urls?: UrlSettings; + } + /** + * Common settings properties for SDKs with synchronous API (standalone and localhost modes). + * + * @interface ISyncSharedSettings + * @extends ISharedSettings + */ + interface ISyncSharedSettings extends ISharedSettings { + /** + * The SDK mode. When using the default in-memory storage or `InLocalStorage` as storage, the only possible value is "standalone", which is the default. + * For "localhost" mode, use "localhost" as authorizationKey. + * + * @property {'standalone'} mode + * @default 'standalone' + */ + mode?: 'standalone'; + /** + * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, + * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. + * @property {boolean} streamingEnabled + * @default true + */ + streamingEnabled?: boolean; + /** + * SDK synchronization settings. + * @property {Object} sync + */ + sync?: ISharedSettings['sync'] & { + /** + * Controls the SDK continuous synchronization flags. + * + * When `true` a running SDK will process rollout plan updates performed on the UI (default). + * When false it'll just fetch all data upon init. + * + * @property {boolean} enabled + * @default true + */ + enabled?: boolean; + }; + } + /** + * Common settings properties for SDKs with pluggable configuration. + * + * @interface IPluggableSettings + */ + interface IPluggableSettings { + /** + * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. + * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging}. + * + * Examples: + * ``` + * config.debug = true + * config.debug = 'WARN' + * config.debug = ErrorLogger() + * ``` + * @property {boolean | LogLevel | ILogger} debug + * @default false + */ + debug?: boolean | LogLevel | SplitIO.ILogger; /** * Defines an optional list of factory functions used to instantiate SDK integrations. * @@ -827,6 +836,87 @@ declare namespace SplitIO { * @property {Object} integrations */ integrations?: IntegrationFactory[]; + } + /** + * Common settings properties for SDKs without pluggable configuration. + * + * @interface INonPluggableSettings + */ + interface INonPluggableSettings { + /** + * Boolean value to indicate whether the logger should be enabled or disabled, or a log level string. + * + * Examples: + * ``` + * config.debug = true + * config.debug = 'WARN' + * ``` + * @property {boolean | LogLevel} debug + * @default false + */ + debug?: boolean | LogLevel; + } + /** + * Common settings properties for server-side SDKs. + * + * @interface IServerSideSharedSettings + */ + interface IServerSideSharedSettings { + /** + * SDK Core settings for NodeJS. + * @property {Object} core + */ + core: { + /** + * Your SDK key. 
+ * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} + * @property {string} authorizationKey + */ + authorizationKey: string; + /** + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * @property {boolean} labelsEnabled + * @default true + */ + labelsEnabled?: boolean; + /** + * Disable machine IP and Name from being sent to Split backend. + * @property {boolean} IPAddressesEnabled + * @default true + */ + IPAddressesEnabled?: boolean; + }; + } + /** + * Common settings properties for client-side SDKs. + * + * @interface IClientSideSharedSettings + */ + interface IClientSideSharedSettings { + /** + * SDK Core settings for client-side. + * @property {Object} core + */ + core: { + /** + * Your SDK key. + * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} + * @property {string} authorizationKey + */ + authorizationKey: string; + /** + * Customer identifier. Whatever this means to you. + * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @property {SplitKey} key + */ + key: SplitKey; + /** + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * @property {boolean} labelsEnabled + * @default true + */ + labelsEnabled?: boolean; + }; /** * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. @@ -840,41 +930,16 @@ declare namespace SplitIO { userConsent?: ConsentStatus; } /** - * Settings interface for SDK instances created with client-side API and synchronous storage (e.g., Browser or React Native). + * Common settings properties for client-side standalone SDKs. * - * @interface IClientSideSettings - * @extends IClientSideBasicSettings - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} + * @interface IClientSideSyncSharedSettings */ - interface IClientSideSettings extends IClientSideBasicSettings { - /** - * The SDK mode. When using the default in memory storage or `InLocalStorage` as storage, the only possible value is "standalone", which is the default. - * For "localhost" mode, use "localhost" as authorizationKey. - * - * @property {'standalone'} mode - * @default 'standalone' - */ - mode?: 'standalone'; + interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISyncSharedSettings { /** * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode} + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} */ features?: MockedFeaturesMap; - /** - * Defines the factory function to instantiate the storage. If not provided, the default IN MEMORY storage is used. - * - * NOTE: at the moment there are not storages to plug in React Native SDK, only `InLocalStorage` for Browser SDK. - * - * Example: - * ``` - * SplitFactory({ - * ... - * storage: InLocalStorage() - * }) - * ``` - * @property {Object} storage - */ - storage?: StorageSyncFactory; /** * SDK Startup settings. * @property {Object} startup @@ -887,7 +952,7 @@ declare namespace SplitIO { */ readyTimeout?: number; /** - * Time to wait for a request before the SDK is ready. 
If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. + * Time to wait for a request before the SDK is ready. If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady * @default 5 */ @@ -931,6 +996,13 @@ declare namespace SplitIO { * @default 30000 */ impressionsQueueSize?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * @property {number} metricsRefreshRate + * @default 120 + * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. + */ + metricsRefreshRate?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. * @property {number} telemetryRefreshRate @@ -958,7 +1030,7 @@ declare namespace SplitIO { eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode} + * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} * @property {number} offlineRefreshRate * @default 15 */ @@ -970,10 +1042,34 @@ declare namespace SplitIO { * @default 1 */ pushRetryBackoffBase?: number; - } + }; + } + /** + * Settings interface for Browser SDK instances created with client-side API and synchronous storage (e.g., in-memory or local storage). + * + * @interface IClientSideSettings + * @extends IClientSideSyncSharedSettings + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} + */ + interface IClientSideSettings extends IClientSideSyncSharedSettings, IPluggableSettings { + /** + * Defines the factory function to instantiate the storage. If not provided, the default in-memory storage is used. + * + * NOTE: Currently, there is no persistent storage option available for the React Native SDK; only `InLocalStorage` for the Browser SDK. + * + * Example: + * ``` + * SplitFactory({ + * ... + * storage: InLocalStorage() + * }) + * ``` + * @property {Object} storage + */ + storage?: StorageSyncFactory; } /** - * Settings interface for SDK instances created in React Native, with client-side API and synchronous storage. + * Settings interface for React Native SDK instances, with client-side API and synchronous storage. * * @interface IReactNativeSettings * @extends IClientSideSettings @@ -981,14 +1077,14 @@ declare namespace SplitIO { */ interface IReactNativeSettings extends IClientSideSettings { } /** - * Settings interface for SDK instances created with client-side API and asynchronous storage (e.g., serverless environments with a persistent storage). + * Settings interface for Browser SDK instances created with client-side API and asynchronous storage (e.g., serverless environments with a persistent storage). * If your storage is synchronous (by default we use memory, which is sync) use SplitIO.IClientSideSettings instead. 
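Editor's note: putting the new pieces together, a client-side standalone configuration now composes `IClientSideSharedSettings` (the `core` block), `ISyncSharedSettings` (`mode`, `streamingEnabled`, `sync.enabled`) and, for the pluggable Browser SDK flavor, the `storage` factory. A hypothetical usage sketch follows; the package entry point and its exports are assumptions, only the `SplitFactory({ ... storage: InLocalStorage() })` shape is taken from the doc comment above.

```
// Hypothetical usage sketch; the import path is an assumption, not part of this patch.
import { SplitFactory, InLocalStorage } from '@splitsoftware/splitio-browserjs';

const config: SplitIO.IClientSideSettings = {
  core: {
    authorizationKey: '<YOUR_CLIENT_SIDE_SDK_KEY>', // placeholder
    key: 'user_123'                                 // customer identifier (SplitKey)
  },
  mode: 'standalone',            // only valid mode with in-memory or InLocalStorage storages
  streamingEnabled: true,
  startup: { readyTimeout: 10 },
  sync: {
    enabled: true,               // keep processing rollout plan updates after init
    impressionsMode: 'OPTIMIZED'
  },
  storage: InLocalStorage()      // optional; defaults to the in-memory storage
};

const factory = SplitFactory(config);
```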
* * @interface IClientSideAsyncSettings - * @extends IClientSideBasicSettings + * @extends IClientSideSharedSettings * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage} */ - interface IClientSideAsyncSettings extends IClientSideBasicSettings { + interface IClientSideAsyncSettings extends IClientSideSharedSettings, ISharedSettings, IPluggableSettings { /** * The SDK mode. When using `PluggableStorage` as storage, the possible values are "consumer" and "consumer_partial". * @@ -1047,12 +1143,16 @@ declare namespace SplitIO { /** * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. + * + * NOTE: this param is ignored in 'consumer' mode. * @property {number} impressionsQueueSize * @default 30000 */ impressionsQueueSize?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * + * NOTE: this param is ignored in 'consumer' mode. * @property {number} telemetryRefreshRate * @default 3600 */ @@ -1074,218 +1174,16 @@ declare namespace SplitIO { * @default 500 */ eventsQueueSize?: number; - } + }; } /** - * Common settings between Browser and NodeJS settings interface. - * @interface ISharedSettings + * Settings interface for JavaScript SDK instances created on the browser, with client-side API and synchronous storage (e.g., in-memory or local storage). + * + * @interface IBrowserSettings + * @extends IClientSideSyncSharedSettings + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} */ - interface ISharedSettings { - /** - * Boolean value to indicate whether the logger should be enabled or disabled, or a log level string. - * - * Examples: - * ``` - * config.debug = true - * config.debug = 'WARN' - * ``` - * @property {boolean | LogLevel} debug - * @default false - */ - debug?: boolean | LogLevel; - /** - * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, - * which will check for the logImpression method. - * @property {IImpressionListener} impressionListener - * @default undefined - */ - impressionListener?: SplitIO.IImpressionListener; - /** - * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, - * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. - * @property {boolean} streamingEnabled - * @default true - */ - streamingEnabled?: boolean; - /** - * SDK synchronization settings. - * @property {Object} sync - */ - sync?: { - /** - * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. - * This configuration is only meaningful when the SDK is working in "standalone" mode. - * - * Example: - * `splitFilter: [ - * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' - * ]` - * @property {SplitIO.SplitFilter[]} splitFilters - */ - splitFilters?: SplitIO.SplitFilter[]; - /** - * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. 
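Editor's note: for the asynchronous client-side variant, the notes added above clarify which scheduler parameters are ignored in plain 'consumer' mode. A small sketch of just those knobs; the `PluggableStorage` factory mentioned above is left out because its signature is not shown in this excerpt.

```
// Scheduler knobs for a 'consumer_partial' setup, per the notes above.
// (The storage factory is omitted; its signature is not part of this excerpt.)
const partialConsumerOptions = {
  mode: 'consumer_partial' as const,
  scheduler: {
    impressionsQueueSize: 30000, // honored in 'consumer_partial', ignored in 'consumer' mode
    telemetryRefreshRate: 3600,  // likewise ignored in 'consumer' mode
    eventsQueueSize: 500
  }
};
```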
- * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. - * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). - * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. - * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. - * - * @property {string} impressionsMode - * @default 'OPTIMIZED' - */ - impressionsMode?: SplitIO.ImpressionsMode; - /** - * Controls the SDK continuous synchronization flags. - * - * When `true` a running SDK will process rollout plan updates performed on the UI (default). - * When false it'll just fetch all data upon init. - * - * @property {boolean} enabled - * @default true - */ - enabled?: boolean; - }; - } - /** - * Settings interface for JavaScript SDK instances created on the browser, with client-side API and synchronous storage. - * - * @interface IBrowserSettings - * @extends ISharedSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} - */ - interface IBrowserSettings extends ISharedSettings { - /** - * SDK Startup settings for the Browser. - * @property {Object} startup - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 10 - */ - readyTimeout?: number; - /** - * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * @property {number} requestTimeoutBeforeReady - * @default 5 - */ - requestTimeoutBeforeReady?: number; - /** - * How many quick retries we will do while starting up the SDK. - * @property {number} retriesOnFailureBeforeReady - * @default 1 - */ - retriesOnFailureBeforeReady?: number; - /** - * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. - * - * @property {number} eventsFirstPushWindow - * @default 10 - */ - eventsFirstPushWindow?: number; - }; - /** - * SDK scheduler settings. - * @property {Object} scheduler - */ - scheduler?: { - /** - * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. - * @property {number} featuresRefreshRate - * @default 60 - */ - featuresRefreshRate?: number; - /** - * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * @property {number} impressionsRefreshRate - * @default 60 - */ - impressionsRefreshRate?: number; - /** - * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} impressionsQueueSize - * @default 30000 - */ - impressionsQueueSize?: number; - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} metricsRefreshRate - * @default 120 - * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. 
- */ - metricsRefreshRate?: number; - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} telemetryRefreshRate - * @default 3600 - */ - telemetryRefreshRate?: number; - /** - * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * @property {number} segmentsRefreshRate - * @default 60 - */ - segmentsRefreshRate?: number; - /** - * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * @property {number} eventsPushRate - * @default 60 - */ - eventsPushRate?: number; - /** - * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} eventsQueueSize - * @default 500 - */ - eventsQueueSize?: number; - /** - * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} - * @property {number} offlineRefreshRate - * @default 15 - */ - offlineRefreshRate?: number; - /** - * When using streaming mode, seconds to wait before re attempting to connect for push notifications. - * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * @property {number} pushRetryBackoffBase - * @default 1 - */ - pushRetryBackoffBase?: number; - }; - /** - * SDK Core settings for the browser. - * @property {Object} core - */ - core: { - /** - * Your SDK key. - * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string; - /** - * Customer identifier. Whatever this means to you. - * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @property {SplitKey} key - */ - key: SplitKey; - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean - }; - /** - * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} - */ - features?: MockedFeaturesMap; + interface IBrowserSettings extends IClientSideSyncSharedSettings, INonPluggableSettings { /** * Defines which kind of storage we can instantiate on the browser. * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. @@ -1305,63 +1203,16 @@ declare namespace SplitIO { */ prefix?: string; }; - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls - */ - urls?: UrlSettings; - /** - * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. - * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. - * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. 
- * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends - * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. - * - * @typedef {string} userConsent - * @default 'GRANTED' - */ - userConsent?: ConsentStatus; - sync?: ISharedSettings['sync'] & { - /** - * Custom options object for HTTP(S) requests in the Browser. - * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. - */ - requestOptions?: { - /** - * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. - * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. - * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` - * - * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, - * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. - * - * @property getHeaderOverrides - * @default undefined - * - * @param context - The context for the request. - * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. - * - * @example - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; - */ - getHeaderOverrides?: (context: { headers: Record }) => Record; - }; - }; } /** - * Common settings interface for SDK instances on NodeJS. - * @interface INodeBasicSettings - * @extends ISharedSettings + * Settings interface for JavaScript SDK instances created on NodeJS, with server-side API and synchronous in-memory storage. + * If your storage is asynchronous (Redis for example) use SplitIO.INodeAsyncSettings instead. + * + * @interface INodeSettings + * @extends IServerSideSharedSettings + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ - interface INodeBasicSettings extends ISharedSettings { + interface INodeSettings extends IServerSideSharedSettings, ISyncSharedSettings, INonPluggableSettings { /** * SDK Startup settings for NodeJS. * @property {Object} startup @@ -1374,7 +1225,7 @@ declare namespace SplitIO { */ readyTimeout?: number; /** - * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. + * Time to wait for a request before the SDK is ready. If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. * @property {number} requestTimeoutBeforeReady * @default 15 */ @@ -1465,60 +1316,6 @@ declare namespace SplitIO { */ pushRetryBackoffBase?: number; }; - /** - * SDK Core settings for NodeJS. - * @property {Object} core - */ - core: { - /** - * Your SDK key. 
- * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string; - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean; - /** - * Disable machine IP and Name from being sent to Split backend. - * @property {boolean} IPAddressesEnabled - * @default true - */ - IPAddressesEnabled?: boolean; - }; - /** - * Defines which kind of storage we should instantiate. - * @property {Object} storage - */ - storage?: { - /** - * Storage type to be instantiated by the SDK. - * @property {StorageType} type - * @default 'MEMORY' - */ - type?: StorageType; - /** - * Options to be passed to the selected storage. - * @property {Object} options - */ - options?: Object; - /** - * Optional prefix to prevent any kind of data collision between SDK versions. - * @property {string} prefix - * @default 'SPLITIO' - */ - prefix?: string; - }; - /** - * The SDK mode. Possible values are "standalone", which is the default when using a synchronous storage, like 'MEMORY' and 'LOCALSTORAGE', - * and "consumer", which must be set when using an asynchronous storage, like 'REDIS'. For "localhost" mode, use "localhost" as authorizationKey. - * @property {SDKMode} mode - * @default 'standalone' - */ - mode?: SDKMode; /** * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} @@ -1526,22 +1323,6 @@ declare namespace SplitIO { * @default '$HOME/.split' */ features?: SplitIO.MockedFeaturesFilePath; - } - /** - * Settings interface for JavaScript SDK instances created on NodeJS, with server-side API and synchronous storage. - * If your storage is asynchronous (Redis for example) use SplitIO.INodeAsyncSettings instead. - * - * @interface INodeSettings - * @extends INodeBasicSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} - */ - interface INodeSettings extends INodeBasicSettings { - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls - */ - urls?: UrlSettings; /** * Defines which kind of storage we can instantiate on NodeJS for 'standalone' mode. * The only possible storage type is 'MEMORY', which is the default. @@ -1561,15 +1342,7 @@ declare namespace SplitIO { */ prefix?: string; }; - /** - * The SDK mode. When using the default 'MEMORY' storage, the only possible value is "standalone", which is the default. - * For "localhost" mode, use "localhost" as authorizationKey. - * - * @property {'standalone'} mode - * @default 'standalone' - */ - mode?: 'standalone'; - sync?: INodeBasicSettings['sync'] & { + sync?: ISyncSharedSettings['sync'] & { /** * Custom options object for HTTP(S) requests in NodeJS. * If provided, this object is merged with the options object passed by the SDK for EventSource and Node-Fetch calls. @@ -1632,10 +1405,10 @@ declare namespace SplitIO { * If your storage is synchronous (by default we use memory, which is sync) use SplitIO.INodeSettings instead. 
* * @interface INodeAsyncSettings - * @extends INodeBasicSettings + * @extends IServerSideSharedSettings * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ - interface INodeAsyncSettings extends INodeBasicSettings { + interface INodeAsyncSettings extends IServerSideSharedSettings, ISharedSettings, INonPluggableSettings { /** * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. * @@ -1644,6 +1417,18 @@ declare namespace SplitIO { * @property {'consumer'} mode */ mode: 'consumer'; + /** + * SDK Startup settings for NodeJS. + * @property {Object} startup + */ + startup?: { + /** + * Maximum amount of time used before notify a timeout. + * @property {number} readyTimeout + * @default 15 + */ + readyTimeout?: number; + }; /** * Defines which kind of async storage we can instantiate on NodeJS for 'consumer' mode. * The only possible storage type is 'REDIS'. @@ -2034,7 +1819,7 @@ declare namespace SplitIO { } interface IClientWithAttributes extends IBasicClient { /** - * Add an attribute to client's in memory attributes storage. + * Add an attribute to client's in-memory attributes storage. * * @param {string} attributeName Attribute name * @param {AttributeType} attributeValue Attribute value @@ -2049,27 +1834,27 @@ declare namespace SplitIO { */ getAttribute(attributeName: string): AttributeType; /** - * Removes from client's in memory attributes storage the attribute with the given name. + * Removes from client's in-memory attributes storage the attribute with the given name. * * @param {string} attributeName * @returns {boolean} true if attribute was removed and false otherwise */ removeAttribute(attributeName: string): boolean; /** - * Add to client's in memory attributes storage the attributes in 'attributes'. + * Add to client's in-memory attributes storage the attributes in 'attributes'. * * @param {Attributes} attributes Object with attributes to store * @returns true if attributes were stored an false otherwise */ setAttributes(attributes: Attributes): boolean; /** - * Return all the attributes stored in client's in memory attributes storage. + * Return all the attributes stored in client's in-memory attributes storage. * * @returns {Attributes} returns all the stored attributes */ getAttributes(): Attributes; /** - * Remove all the stored attributes in the client's in memory attribute storage. + * Remove all the stored attributes in the client's in-memory attribute storage. 
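Editor's note: the attribute methods documented above all operate on the client's in-memory attributes storage and return booleans (or the stored values) synchronously. A short usage sketch, assuming a `client` instance that implements `IClientWithAttributes`:

```
// Usage sketch for the attribute binding API described above.
declare const client: SplitIO.IClientWithAttributes; // assumed to come from a factory

client.setAttribute('plan', 'premium');              // true if the attribute was stored
client.setAttributes({ age: 31, beta_user: true });  // bulk create/update

client.getAttribute('plan');                         // 'premium'
client.getAttributes();                              // { plan: 'premium', age: 31, beta_user: true }

client.removeAttribute('beta_user');                 // true if the attribute was removed
```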
* * @returns {boolean} true if all attribute were removed and false otherwise */ From 912f20ec553f646cddbd9879f8328154c5c081f2 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 29 Oct 2024 13:28:23 -0300 Subject: [PATCH 135/146] Add TSDoc linter rules --- .eslintrc | 14 + package-lock.json | 247 ++++- package.json | 3 +- src/__tests__/testUtils/index.ts | 8 +- src/integrations/pluggable.ts | 4 +- src/logger/sdkLogger.ts | 4 +- src/readiness/sdkReadinessManager.ts | 24 +- src/sdkClient/clientAttributesDecoration.ts | 18 +- src/sdkClient/clientCS.ts | 4 +- src/services/splitApi.ts | 26 +- src/services/splitHttpClient.ts | 4 +- src/storages/AbstractMySegmentsCacheSync.ts | 2 +- src/storages/AbstractSplitsCacheAsync.ts | 5 +- src/storages/AbstractSplitsCacheSync.ts | 5 +- src/storages/dataLoader.ts | 11 +- src/storages/findLatencyIndex.ts | 2 +- .../inLocalStorage/SplitsCacheInLocal.ts | 7 +- .../inMemory/AttributesCacheInMemory.ts | 20 +- src/storages/inMemory/EventsCacheInMemory.ts | 2 +- .../inMemory/ImpressionsCacheInMemory.ts | 2 +- src/storages/inMemory/InMemoryStorage.ts | 2 +- src/storages/inMemory/InMemoryStorageCS.ts | 2 +- .../inMemory/UniqueKeysCacheInMemoryCS.ts | 2 +- src/storages/inRedis/SplitsCacheInRedis.ts | 4 +- src/storages/inRedis/TelemetryCacheInRedis.ts | 6 +- .../inRedis/UniqueKeysCacheInRedis.ts | 2 +- .../pluggable/EventsCachePluggable.ts | 2 +- .../pluggable/ImpressionsCachePluggable.ts | 2 +- .../pluggable/SegmentsCachePluggable.ts | 2 +- .../pluggable/SplitsCachePluggable.ts | 6 +- .../pluggable/TelemetryCachePluggable.ts | 6 +- .../pluggable/UniqueKeysCachePluggable.ts | 2 +- src/storages/pluggable/inMemoryWrapper.ts | 2 +- src/storages/pluggable/index.ts | 2 +- src/storages/pluggable/wrapperAdapter.ts | 4 +- src/storages/types.ts | 99 +- .../splitsParser/splitsParserFromSettings.ts | 2 +- src/sync/offline/syncManagerOffline.ts | 2 +- .../polling/updaters/mySegmentsUpdater.ts | 6 +- .../polling/updaters/segmentChangesUpdater.ts | 16 +- .../polling/updaters/splitChangesUpdater.ts | 26 +- src/sync/streaming/AuthClient/index.ts | 4 +- src/sync/streaming/SSEClient/index.ts | 11 +- .../SSEHandler/NotificationKeeper.ts | 2 +- .../SSEHandler/NotificationParser.ts | 9 +- src/sync/streaming/SSEHandler/index.ts | 4 +- .../UpdateWorkers/MySegmentsUpdateWorker.ts | 6 +- .../UpdateWorkers/SegmentsUpdateWorker.ts | 4 +- .../UpdateWorkers/SplitsUpdateWorker.ts | 8 +- src/sync/streaming/parseUtils.ts | 26 +- src/sync/syncManagerOnline.ts | 4 +- src/sync/syncTask.ts | 8 +- src/trackers/eventTracker.ts | 4 +- src/trackers/impressionsTracker.ts | 10 +- src/trackers/strategy/strategyDebug.ts | 2 +- src/trackers/strategy/strategyNone.ts | 4 +- src/trackers/strategy/strategyOptimized.ts | 4 +- src/trackers/uniqueKeysTracker.ts | 6 +- src/types.ts | 7 +- src/utils/Backoff.ts | 4 - src/utils/Semver.ts | 2 +- src/utils/base64/index.ts | 4 +- src/utils/decompress/index.ts | 8 +- src/utils/lang/binarySearch.ts | 4 +- src/utils/murmur3/murmur3_128.ts | 2 +- src/utils/murmur3/murmur3_128_x86.ts | 2 +- src/utils/murmur3/murmur3_64.ts | 2 +- src/utils/murmur3/utfx.ts | 27 +- src/utils/promise/wrapper.ts | 4 +- src/utils/settingsValidation/index.ts | 4 +- .../settingsValidation/integrations/common.ts | 8 +- .../integrations/configurable.ts | 6 +- .../integrations/pluggable.ts | 4 +- .../logger/builtinLogger.ts | 2 +- .../settingsValidation/logger/commons.ts | 2 +- .../logger/pluggableLogger.ts | 2 +- src/utils/settingsValidation/splitFilters.ts | 19 +- 
.../settingsValidation/storage/storageCS.ts | 4 +- src/utils/settingsValidation/url.ts | 6 +- types/splitio.d.ts | 936 ++++++++---------- 80 files changed, 866 insertions(+), 918 deletions(-) diff --git a/.eslintrc b/.eslintrc index c9af6d19..be7b42d7 100644 --- a/.eslintrc +++ b/.eslintrc @@ -5,6 +5,7 @@ "parser": "@typescript-eslint/parser", "plugins": [ "@typescript-eslint", + "eslint-plugin-tsdoc", "import" ], @@ -90,6 +91,19 @@ "sourceType": "module" } }, + { + "files": ["types/**"], + "rules": { + "no-use-before-define": "off" + } + }, + { + // Enable TSDoc rules for TypeScript files, allowing the use of JSDoc in JS files. + "files": ["**/*.ts"], + "rules": { + "tsdoc/syntax": "warn" + } + }, // @TODO remove when moving InLocalStorage to js-browser { "files": ["src/storages/inLocalStorage/**/*.ts"], diff --git a/package-lock.json b/package-lock.json index da662c9d..589820a5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -23,6 +23,7 @@ "eslint": "^8.48.0", "eslint-plugin-compat": "^6.0.1", "eslint-plugin-import": "^2.25.3", + "eslint-plugin-tsdoc": "^0.3.0", "fetch-mock": "^9.11.0", "ioredis": "^4.28.0", "jest": "^27.2.3", @@ -1369,6 +1370,46 @@ "integrity": "sha512-bOOF4GGzn0exmvNHpSWmTfOXB9beTpIFCm2KPY2UVoCdn1YVfr8heuHr1C++BYI9Tun8REgi5TNVdKbBs249CA==", "dev": true }, + "node_modules/@microsoft/tsdoc": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.0.tgz", + "integrity": "sha512-HZpPoABogPvjeJOdzCOSJsXeL/SMCBgBZMVC3X3d7YYp2gf31MfxhUoYUNwf1ERPJOnQc0wkFn9trqI6ZEdZuA==", + "dev": true + }, + "node_modules/@microsoft/tsdoc-config": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc-config/-/tsdoc-config-0.17.0.tgz", + "integrity": "sha512-v/EYRXnCAIHxOHW+Plb6OWuUoMotxTN0GLatnpOb1xq0KuTNw/WI3pamJx/UbsoJP5k9MCw1QxvvhPcF9pH3Zg==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.15.0", + "ajv": "~8.12.0", + "jju": "~1.4.0", + "resolve": "~1.22.2" + } + }, + "node_modules/@microsoft/tsdoc-config/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@microsoft/tsdoc-config/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -3183,24 +3224,22 @@ "node": ">=0.10.0" } }, - "node_modules/eslint-plugin-import/node_modules/is-core-module": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz", - "integrity": "sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==", - "dev": true, - "dependencies": { - "has": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/eslint-plugin-import/node_modules/ms": { "version": "2.0.0", "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true }, + "node_modules/eslint-plugin-tsdoc": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-tsdoc/-/eslint-plugin-tsdoc-0.3.0.tgz", + "integrity": "sha512-0MuFdBrrJVBjT/gyhkP2BqpD0np1NxNLfQ38xXDlSs/KVVpKI2A6vN7jx2Rve/CyUsvOsMGwp9KKrinv7q9g3A==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.15.0", + "@microsoft/tsdoc-config": "0.17.0" + } + }, "node_modules/eslint-scope": { "version": "7.2.2", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", @@ -3762,10 +3801,13 @@ } }, "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/gensync": { "version": "1.0.0-beta.2", @@ -3972,6 +4014,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/html-encoding-sniffer": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", @@ -4203,12 +4257,15 @@ } }, "node_modules/is-core-module": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.7.0.tgz", - "integrity": "sha512-ByY+tjCciCr+9nLryBYcSD50EOGWt95c7tIsKTG1J2ixKKXPvF7Ej3AVd+UfDydAJom3biBGDBALaO79ktwgEQ==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", "dev": true, "dependencies": { - "has": "^1.0.3" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6199,6 +6256,12 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, + "node_modules/jju": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", + "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", + "dev": true + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -7056,6 +7119,15 @@ "node": ">=0.10.0" } }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", @@ -7063,13 +7135,17 @@ "dev": true }, 
"node_modules/resolve": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", - "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", "dev": true, "dependencies": { - "is-core-module": "^2.2.0", - "path-parse": "^1.0.6" + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -7435,6 +7511,18 @@ "node": ">=8" } }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", @@ -9059,6 +9147,44 @@ "integrity": "sha512-bOOF4GGzn0exmvNHpSWmTfOXB9beTpIFCm2KPY2UVoCdn1YVfr8heuHr1C++BYI9Tun8REgi5TNVdKbBs249CA==", "dev": true }, + "@microsoft/tsdoc": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.0.tgz", + "integrity": "sha512-HZpPoABogPvjeJOdzCOSJsXeL/SMCBgBZMVC3X3d7YYp2gf31MfxhUoYUNwf1ERPJOnQc0wkFn9trqI6ZEdZuA==", + "dev": true + }, + "@microsoft/tsdoc-config": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc-config/-/tsdoc-config-0.17.0.tgz", + "integrity": "sha512-v/EYRXnCAIHxOHW+Plb6OWuUoMotxTN0GLatnpOb1xq0KuTNw/WI3pamJx/UbsoJP5k9MCw1QxvvhPcF9pH3Zg==", + "dev": true, + "requires": { + "@microsoft/tsdoc": "0.15.0", + "ajv": "~8.12.0", + "jju": "~1.4.0", + "resolve": "~1.22.2" + }, + "dependencies": { + "ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + }, + "json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + } + } + }, "@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -10538,15 +10664,6 @@ "esutils": "^2.0.2" } }, - "is-core-module": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz", - "integrity": "sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==", - "dev": true, - "requires": { - "has": "^1.0.3" - } - }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -10555,6 +10672,16 @@ } } }, + "eslint-plugin-tsdoc": { + "version": "0.3.0", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-tsdoc/-/eslint-plugin-tsdoc-0.3.0.tgz", + "integrity": "sha512-0MuFdBrrJVBjT/gyhkP2BqpD0np1NxNLfQ38xXDlSs/KVVpKI2A6vN7jx2Rve/CyUsvOsMGwp9KKrinv7q9g3A==", + "dev": true, + "requires": { + "@microsoft/tsdoc": "0.15.0", + "@microsoft/tsdoc-config": "0.17.0" + } + }, "eslint-scope": { "version": "7.2.2", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", @@ -10836,9 +10963,9 @@ "optional": true }, "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", "dev": true }, "gensync": { @@ -10983,6 +11110,15 @@ "has-symbols": "^1.0.2" } }, + "hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "requires": { + "function-bind": "^1.1.2" + } + }, "html-encoding-sniffer": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", @@ -11152,12 +11288,12 @@ "dev": true }, "is-core-module": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.7.0.tgz", - "integrity": "sha512-ByY+tjCciCr+9nLryBYcSD50EOGWt95c7tIsKTG1J2ixKKXPvF7Ej3AVd+UfDydAJom3biBGDBALaO79ktwgEQ==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", "dev": true, "requires": { - "has": "^1.0.3" + "hasown": "^2.0.2" } }, "is-date-object": { @@ -12627,6 +12763,12 @@ } } }, + "jju": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", + "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", + "dev": true + }, "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -13276,6 +13418,12 @@ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, "requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", @@ -13283,13 +13431,14 @@ "dev": true }, "resolve": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", - "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", "dev": true, "requires": { - "is-core-module": "^2.2.0", - "path-parse": "^1.0.6" + "is-core-module": "^2.13.0", + 
"path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" } }, "resolve-cwd": { @@ -13557,6 +13706,12 @@ } } }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, "symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", diff --git a/package.json b/package.json index 0d4dadd0..f774a4e4 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ ], "scripts": { "check": "npm run check:lint && npm run check:types", - "check:lint": "eslint src --ext .js,.ts", + "check:lint": "eslint src types --ext .js,.ts,.d.ts", "check:types": "tsc --noEmit", "build": "npm run build:cjs && npm run build:esm", "build:esm": "rimraf esm && tsc -m es2015 --outDir esm", @@ -67,6 +67,7 @@ "eslint": "^8.48.0", "eslint-plugin-compat": "^6.0.1", "eslint-plugin-import": "^2.25.3", + "eslint-plugin-tsdoc": "^0.3.0", "fetch-mock": "^9.11.0", "ioredis": "^4.28.0", "jest": "^27.2.3", diff --git a/src/__tests__/testUtils/index.ts b/src/__tests__/testUtils/index.ts index 5c3db2f7..9824f0cd 100644 --- a/src/__tests__/testUtils/index.ts +++ b/src/__tests__/testUtils/index.ts @@ -3,10 +3,10 @@ const DEFAULT_ERROR_MARGIN = 75; // 0.075 secs if numbers are timestamps in mill /** * Assert if an `actual` and `expected` numeric values are nearly equal. * - * @param {number} actual actual time lapse in millis - * @param {number} expected expected time lapse in millis - * @param {number} epsilon error margin in millis - * @returns {boolean} whether the absolute difference is minor to epsilon value or not + * @param actual - actual time lapse in millis + * @param expected - expected time lapse in millis + * @param epsilon - error margin in millis + * @returns whether the absolute difference is minor to epsilon value or not */ export function nearlyEqual(actual: number, expected: number, epsilon = DEFAULT_ERROR_MARGIN) { const diff = Math.abs(actual - expected); diff --git a/src/integrations/pluggable.ts b/src/integrations/pluggable.ts index 64dce8bb..b1b7a12f 100644 --- a/src/integrations/pluggable.ts +++ b/src/integrations/pluggable.ts @@ -6,8 +6,8 @@ import { IIntegration, IIntegrationManager, IIntegrationFactoryParams } from './ * Factory function for IntegrationsManager with pluggable integrations. * The integrations manager instantiates integration, and bypass tracked events and impressions to them. * - * @param integrations validated list of integration factories - * @param params information of the Sdk factory instance that integrations can access to + * @param integrations - validated list of integration factories + * @param params - information of the Sdk factory instance that integrations can access to * * @returns integration manager or undefined if `integrations` are not present in settings. */ diff --git a/src/logger/sdkLogger.ts b/src/logger/sdkLogger.ts index bbe3043e..42d9be42 100644 --- a/src/logger/sdkLogger.ts +++ b/src/logger/sdkLogger.ts @@ -6,7 +6,7 @@ import { ERROR_LOGLEVEL_INVALID } from './constants'; /** * The public Logger utility API exposed via SplitFactory, used to update the log level. 
* - * @param log the factory logger instance to handle + * @param log - the factory logger instance to handle */ export function createLoggerAPI(log: ILogger): SplitIO.ILoggerAPI { @@ -27,7 +27,7 @@ export function createLoggerAPI(log: ILogger): SplitIO.ILoggerAPI { }, /** * Sets a custom log Level for the SDK. - * @param {string} logLevel - Custom LogLevel value. + * @param logLevel - Custom LogLevel value. */ setLogLevel, /** diff --git a/src/readiness/sdkReadinessManager.ts b/src/readiness/sdkReadinessManager.ts index 61cabfe6..ee558d47 100644 --- a/src/readiness/sdkReadinessManager.ts +++ b/src/readiness/sdkReadinessManager.ts @@ -14,8 +14,8 @@ const REMOVE_LISTENER_EVENT = 'removeListener'; * SdkReadinessManager factory, which provides the public status API of SDK clients and manager: ready promise, readiness event emitter and constants (SDK_READY, etc). * It also updates logs related warnings and errors. * - * @param readyTimeout time in millis to emit SDK_READY_TIME_OUT event - * @param readinessManager optional readinessManager to use. only used internally for `shared` method + * @param readyTimeout - time in millis to emit SDK_READY_TIME_OUT event + * @param readinessManager - optional readinessManager to use. only used internally for `shared` method */ export function sdkReadinessManagerFactory( EventEmitter: new () => SplitIO.IEventEmitter, @@ -92,25 +92,7 @@ export function sdkReadinessManagerFactory( SDK_UPDATE, SDK_READY_TIMED_OUT, }, - /** - * Returns a promise that will be resolved once the SDK has finished loading (SDK_READY event emitted) or rejected if the SDK has timedout (SDK_READY_TIMED_OUT event emitted). - * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, calling the `ready` method after the - * SDK had timed out will return a new promise that should eventually resolve if the SDK gets ready. - * - * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises. - * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example: - * ``` - * try { - * await client.ready().catch((e) => { throw e; }); - * // SDK is ready - * } catch(e) { - * // SDK has timedout - * } - * ``` - * - * @function ready - * @returns {Promise} - */ + ready() { if (readinessManager.hasTimedout()) { if (!readinessManager.isReady()) { diff --git a/src/sdkClient/clientAttributesDecoration.ts b/src/sdkClient/clientAttributesDecoration.ts index 4f4d9070..621b017d 100644 --- a/src/sdkClient/clientAttributesDecoration.ts +++ b/src/sdkClient/clientAttributesDecoration.ts @@ -80,9 +80,9 @@ export function clientAttributesDecoration = {}; @@ -95,8 +95,8 @@ export function clientAttributesDecoration) { @@ -117,7 +117,7 @@ export function clientAttributesDecoration) { const url = `${urls.events}/events/bulk`; @@ -92,8 +92,8 @@ export function splitApiFactory( /** * Post impressions. * - * @param body Impressions bulk payload - * @param headers Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. + * @param body - Impressions bulk payload + * @param headers - Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. 
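Editor's note: the doc block removed from `sdkReadinessManager` above carried a caveat worth restating: the `ready()` promise is built so that an unhandled rejection is avoided when no `onRejected` handler is attached, so with async/await the rejection must be propagated explicitly. A self-contained restatement of the example from that removed comment (the `client` declaration is assumed):

```
// Restates the caveat from the removed ready() doc block above.
declare const client: { ready(): Promise<void> }; // assumed client shape

async function waitForSdk() {
  try {
    await client.ready().catch((e) => { throw e; });
    // SDK is ready
  } catch (e) {
    // SDK has timed out
  }
}
```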
*/ postTestImpressionsBulk(body: string, headers?: Record) { const url = `${urls.events}/testImpressions/bulk`; @@ -106,8 +106,8 @@ export function splitApiFactory( /** * Post impressions counts. * - * @param body Impressions counts payload - * @param headers Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. + * @param body - Impressions counts payload + * @param headers - Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. */ postTestImpressionsCount(body: string, headers?: Record) { const url = `${urls.events}/testImpressions/count`; @@ -117,8 +117,8 @@ export function splitApiFactory( /** * Post unique keys for client side. * - * @param body unique keys payload - * @param headers Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. + * @param body - unique keys payload + * @param headers - Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. */ postUniqueKeysBulkCs(body: string, headers?: Record) { const url = `${urls.telemetry}/v1/keys/cs`; @@ -128,8 +128,8 @@ export function splitApiFactory( /** * Post unique keys for server side. * - * @param body unique keys payload - * @param headers Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. + * @param body - unique keys payload + * @param headers - Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. */ postUniqueKeysBulkSs(body: string, headers?: Record) { const url = `${urls.telemetry}/v1/keys/ss`; diff --git a/src/services/splitHttpClient.ts b/src/services/splitHttpClient.ts index b42cc641..2fcebfc1 100644 --- a/src/services/splitHttpClient.ts +++ b/src/services/splitHttpClient.ts @@ -10,8 +10,8 @@ const messageNoFetch = 'Global fetch API is not available.'; /** * Factory of Split HTTP clients, which are HTTP clients with predefined headers for Split endpoints. * - * @param settings SDK settings, used to access authorizationKey, logger instance and metadata (SDK version, ip and hostname) to set additional headers - * @param platform object containing environment-specific dependencies + * @param settings - SDK settings, used to access authorizationKey, logger instance and metadata (SDK version, ip and hostname) to set additional headers + * @param platform - object containing environment-specific dependencies */ export function splitHttpClientFactory(settings: ISettings, { getOptions, getFetch }: Pick): ISplitHttpClient { diff --git a/src/storages/AbstractMySegmentsCacheSync.ts b/src/storages/AbstractMySegmentsCacheSync.ts index 740b9644..a03fc416 100644 --- a/src/storages/AbstractMySegmentsCacheSync.ts +++ b/src/storages/AbstractMySegmentsCacheSync.ts @@ -38,8 +38,8 @@ export abstract class AbstractMySegmentsCacheSync implements ISegmentsCacheSync /** * Only used for the `skC`(segment keys count) telemetry stat: 1 for client-side, and total count of keys in server-side. - * @TODO for client-side it should be the number of clients, but it requires a refactor of MySegments caches to simplify the code. */ + // @TODO for client-side it should be the number of clients, but it requires a refactor of MySegments caches to simplify the code. 
abstract getKeysCount(): number abstract getChangeNumber(name: string): number diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts index 8374c8ae..dcf059ed 100644 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ b/src/storages/AbstractSplitsCacheAsync.ts @@ -39,10 +39,7 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { * Kill `name` split and set `defaultTreatment` and `changeNumber`. * Used for SPLIT_KILL push notifications. * - * @param {string} name - * @param {string} defaultTreatment - * @param {number} changeNumber - * @returns {Promise} a promise that is resolved once the split kill operation is performed. The fulfillment value is a boolean: `true` if the operation successed updating the split or `false` if no split is updated, + * @returns a promise that is resolved once the split kill operation is performed. The fulfillment value is a boolean: `true` if the operation successed updating the split or `false` if no split is updated, * for instance, if the `changeNumber` is old, or if the split is not found (e.g., `/splitchanges` hasn't been fetched yet), or if the storage fails to apply the update. * The promise will never be rejected. */ diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index 92df46d5..f82ebbd6 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -59,10 +59,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { * Kill `name` split and set `defaultTreatment` and `changeNumber`. * Used for SPLIT_KILL push notifications. * - * @param {string} name - * @param {string} defaultTreatment - * @param {number} changeNumber - * @returns {boolean} `true` if the operation successed updating the split, or `false` if no split is updated, + * @returns `true` if the operation successed updating the split, or `false` if no split is updated, * for instance, if the `changeNumber` is old, or if the split is not found (e.g., `/splitchanges` hasn't been fetched yet), or if the storage fails to apply the update. */ killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean { diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 5e70db86..ce288868 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -5,7 +5,7 @@ import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; /** * Factory of client-side storage loader * - * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader + * @param preloadedData - validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader * and extended with a `mySegmentsData` property. * @returns function to preload the storage */ @@ -15,12 +15,11 @@ export function dataLoaderFactory(preloadedData: PreloadedData): DataLoader { * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) * - * @param storage object containing `splits` and `segments` cache (client-side variant) - * @param userId user key string of the provided MySegmentsCache - * - * @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. - * @TODO extend to load data on shared mySegments storages. 
Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. + * @param storage - object containing `splits` and `segments` cache (client-side variant) + * @param userId - user key string of the provided MySegmentsCache */ + // @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. + // @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. return function loadData(storage: { splits: ISplitsCacheSync, segments: ISegmentsCacheSync }, userId: string) { // Do not load data if current preloadedData is empty if (Object.keys(preloadedData).length === 0) return; diff --git a/src/storages/findLatencyIndex.ts b/src/storages/findLatencyIndex.ts index aac73e7a..3208bacc 100644 --- a/src/storages/findLatencyIndex.ts +++ b/src/storages/findLatencyIndex.ts @@ -7,7 +7,7 @@ const BASE = 1.5; /** * Calculates buckets from latency in milliseconds * - * @param latencyInMs + * @param latencyInMs - latency in milliseconds * @returns a bucket index from 0 to 22 inclusive */ export function findLatencyIndex(latencyInMs: number): number { diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index a777e081..93eb6f32 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -20,11 +20,6 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { private hasSync?: boolean; private updateNewFilter?: boolean; - /** - * @param {KeyBuilderCS} keys - * @param {number | undefined} expirationTimestamp - * @param {ISplitFiltersValidation} splitFiltersValidation - */ constructor(settings: ISettings, keys: KeyBuilderCS, expirationTimestamp?: number) { super(); this.keys = keys; @@ -229,7 +224,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { /** * Clean Splits cache if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, * - * @param {number | undefined} expirationTimestamp if the value is not a number, data will not be cleaned + * @param expirationTimestamp - if the value is not a number, data will not be cleaned */ private _checkExpiration(expirationTimestamp?: number) { let value: string | number | null = localStorage.getItem(this.keys.buildLastUpdatedKey()); diff --git a/src/storages/inMemory/AttributesCacheInMemory.ts b/src/storages/inMemory/AttributesCacheInMemory.ts index 80870e55..a8b084ea 100644 --- a/src/storages/inMemory/AttributesCacheInMemory.ts +++ b/src/storages/inMemory/AttributesCacheInMemory.ts @@ -9,9 +9,9 @@ export class AttributesCacheInMemory { /** * Create or update the value for the given attribute * - * @param {string} attributeName attribute name - * @param {Object} attributeValue attribute value - * @returns {boolean} the attribute was stored + * @param attributeName - attribute name + * @param attributeValue - attribute value + * @returns the attribute was stored */ setAttribute(attributeName: string, attributeValue: SplitIO.AttributeType) { this.attributesCache[attributeName] = attributeValue; @@ -21,8 +21,8 @@ export class AttributesCacheInMemory { /** * Retrieves the value of a given attribute * - * @param {string} attributeName attribute name - * @returns {Object?} stored attribute value + * @param attributeName - attribute name + * @returns stored attribute 
value */ getAttribute(attributeName: string) { return this.attributesCache[attributeName]; @@ -31,8 +31,8 @@ export class AttributesCacheInMemory { /** * Create or update all the given attributes * - * @param {[string, Object]} attributes attributes to create or update - * @returns {boolean} attributes were stored + * @param attributes - attributes to create or update + * @returns attributes were stored */ setAttributes(attributes: Record) { this.attributesCache = objectAssign(this.attributesCache, attributes); @@ -42,7 +42,7 @@ export class AttributesCacheInMemory { /** * Retrieve the full attributes map * - * @returns {Map} stored attributes + * @returns stored attributes */ getAll() { return this.attributesCache; @@ -51,8 +51,8 @@ export class AttributesCacheInMemory { /** * Removes a given attribute from the map * - * @param {string} attributeName attribute to remove - * @returns {boolean} attribute removed + * @param attributeName - attribute to remove + * @returns attribute removed */ removeAttribute(attributeName: string) { if (Object.keys(this.attributesCache).indexOf(attributeName) >= 0) { diff --git a/src/storages/inMemory/EventsCacheInMemory.ts b/src/storages/inMemory/EventsCacheInMemory.ts index 92152aa0..5c897868 100644 --- a/src/storages/inMemory/EventsCacheInMemory.ts +++ b/src/storages/inMemory/EventsCacheInMemory.ts @@ -12,7 +12,7 @@ export class EventsCacheInMemory implements IEventsCacheSync { /** * - * @param eventsQueueSize number of queued events to call onFullQueueCb. + * @param eventsQueueSize - number of queued events to call onFullQueueCb. * Default value is 0, that means no maximum value, in case we want to avoid this being triggered. */ constructor(eventsQueueSize: number = 0) { diff --git a/src/storages/inMemory/ImpressionsCacheInMemory.ts b/src/storages/inMemory/ImpressionsCacheInMemory.ts index 9b959c31..6995481a 100644 --- a/src/storages/inMemory/ImpressionsCacheInMemory.ts +++ b/src/storages/inMemory/ImpressionsCacheInMemory.ts @@ -9,7 +9,7 @@ export class ImpressionsCacheInMemory implements IImpressionsCacheSync { /** * - * @param impressionsQueueSize number of queued impressions to call onFullQueueCb. + * @param impressionsQueueSize - number of queued impressions to call onFullQueueCb. * Default value is 0, that means no maximum value, in case we want to avoid this being triggered. 
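The in-memory event and impression caches above share the queue-size contract spelled out in these constructors: once the number of queued items reaches the configured size the full-queue callback fires so the owner can flush, and a size of 0 disables the limit entirely. A generic, simplified sketch of that pattern (not the SDK's actual cache classes):

// Minimal bounded-queue sketch: invokes onFullQueue once the configured size is reached.
class BoundedQueue<T> {
  private items: T[] = [];

  constructor(private maxQueueSize: number = 0, private onFullQueue?: () => void) {}

  track(item: T): void {
    this.items.push(item);
    // A size of 0 means "no maximum", so the callback never fires in that case.
    if (this.maxQueueSize > 0 && this.items.length >= this.maxQueueSize && this.onFullQueue) {
      this.onFullQueue();
    }
  }

  // Hands the queued items to the caller (e.g., a submitter about to POST them) and empties the queue.
  pop(): T[] {
    return this.items.splice(0, this.items.length);
  }

  isEmpty(): boolean {
    return this.items.length === 0;
  }
}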
*/ constructor(impressionsQueueSize: number = 0) { diff --git a/src/storages/inMemory/InMemoryStorage.ts b/src/storages/inMemory/InMemoryStorage.ts index ccf3bd6a..e91ce8c6 100644 --- a/src/storages/inMemory/InMemoryStorage.ts +++ b/src/storages/inMemory/InMemoryStorage.ts @@ -11,7 +11,7 @@ import { UniqueKeysCacheInMemory } from './UniqueKeysCacheInMemory'; /** * InMemory storage factory for standalone server-side SplitFactory * - * @param params parameters required by EventsCacheSync + * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageFactory(params: IStorageFactoryParams): IStorageSync { const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 0ddf58f0..dd4262c2 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -11,7 +11,7 @@ import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; /** * InMemory storage factory for standalone client-side SplitFactory * - * @param params parameters required by EventsCacheSync + * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; diff --git a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts index c49fddd1..d5fb8375 100644 --- a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts +++ b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts @@ -12,7 +12,7 @@ export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { /** * - * @param impressionsQueueSize number of queued impressions to call onFullQueueCb. + * @param impressionsQueueSize - number of queued impressions to call onFullQueueCb. * Default value is 0, that means no maximum value, in case we want to avoid this being triggered. */ constructor(uniqueKeysQueueSize = DEFAULT_CACHE_SIZE) { diff --git a/src/storages/inRedis/SplitsCacheInRedis.ts b/src/storages/inRedis/SplitsCacheInRedis.ts index 428efb94..c98dca6e 100644 --- a/src/storages/inRedis/SplitsCacheInRedis.ts +++ b/src/storages/inRedis/SplitsCacheInRedis.ts @@ -186,10 +186,8 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * Get list of all split definitions. * The returned promise is resolved with the list of split definitions, * or rejected if redis operation fails. - * - * @TODO we need to benchmark which is the maximun number of commands we could - * pipeline without kill redis performance. */ + // @TODO we need to benchmark which is the maximum number of commands we could pipeline without killing Redis performance. getAll(): Promise { return this.redis.keys(this.keys.searchPatternForSplitKeys()) .then((listOfKeys) => this.redis.pipeline(listOfKeys.map(k => ['get', k])).exec()) diff --git a/src/storages/inRedis/TelemetryCacheInRedis.ts b/src/storages/inRedis/TelemetryCacheInRedis.ts index 9cb44711..c4564f79 100644 --- a/src/storages/inRedis/TelemetryCacheInRedis.ts +++ b/src/storages/inRedis/TelemetryCacheInRedis.ts @@ -14,9 +14,9 @@ export class TelemetryCacheInRedis implements ITelemetryCacheAsync { /** * Create a Telemetry cache that uses Redis as storage. - * @param log Logger instance. - * @param keys Key builder.
- * @param redis Redis client. + * @param log - Logger instance. + * @param keys - Key builder. + * @param redis - Redis client. */ constructor(private readonly log: ILogger, private readonly keys: KeyBuilderSS, private readonly redis: RedisAdapter) { } diff --git a/src/storages/inRedis/UniqueKeysCacheInRedis.ts b/src/storages/inRedis/UniqueKeysCacheInRedis.ts index d74e2f03..2bebe84d 100644 --- a/src/storages/inRedis/UniqueKeysCacheInRedis.ts +++ b/src/storages/inRedis/UniqueKeysCacheInRedis.ts @@ -63,7 +63,7 @@ export class UniqueKeysCacheInRedis extends UniqueKeysCacheInMemory implements I /** * Async consumer API, used by synchronizer. - * @param count number of items to pop from the queue. If not provided or equal 0, all items will be popped. + * @param count - number of items to pop from the queue. If not provided or equal 0, all items will be popped. */ popNRaw(count = 0): Promise { return this.redis.lrange(this.key, 0, count - 1).then(uniqueKeyItems => { diff --git a/src/storages/pluggable/EventsCachePluggable.ts b/src/storages/pluggable/EventsCachePluggable.ts index 1fa99030..ce87a4c6 100644 --- a/src/storages/pluggable/EventsCachePluggable.ts +++ b/src/storages/pluggable/EventsCachePluggable.ts @@ -21,7 +21,7 @@ export class EventsCachePluggable implements IEventsCacheAsync { /** * Push given event to the storage. - * @param eventData Event item to push. + * @param eventData - Event item to push. * @returns A promise that is resolved with a boolean value indicating if the push operation succeeded or failed. * Unlike `impressions::track`, The promise will never be rejected. */ diff --git a/src/storages/pluggable/ImpressionsCachePluggable.ts b/src/storages/pluggable/ImpressionsCachePluggable.ts index e7dadab6..0be57ef5 100644 --- a/src/storages/pluggable/ImpressionsCachePluggable.ts +++ b/src/storages/pluggable/ImpressionsCachePluggable.ts @@ -21,7 +21,7 @@ export class ImpressionsCachePluggable implements IImpressionsCacheAsync { /** * Push given impressions to the storage. - * @param impressions List of impresions to push. + * @param impressions - List of impressions to push. * @returns A promise that is resolved if the push operation succeeded * or rejected if the wrapper operation fails. */ diff --git a/src/storages/pluggable/SegmentsCachePluggable.ts b/src/storages/pluggable/SegmentsCachePluggable.ts index 3a057557..033b1a49 100644 --- a/src/storages/pluggable/SegmentsCachePluggable.ts +++ b/src/storages/pluggable/SegmentsCachePluggable.ts @@ -83,7 +83,7 @@ export class SegmentsCachePluggable implements ISegmentsCacheAsync { return this.wrapper.getItems(this.keys.buildRegisteredSegmentsKey()); } - /** @TODO implement if required by DataLoader or Producer mode */ + // @TODO implement if required by DataLoader or Producer mode clear(): Promise { return Promise.resolve(true); } diff --git a/src/storages/pluggable/SplitsCachePluggable.ts b/src/storages/pluggable/SplitsCachePluggable.ts index c2bc17fa..ddb06149 100644 --- a/src/storages/pluggable/SplitsCachePluggable.ts +++ b/src/storages/pluggable/SplitsCachePluggable.ts @@ -19,9 +19,9 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { /** * Create a SplitsCache that uses a storage wrapper. - * @param log Logger instance. - * @param keys Key builder. - * @param wrapper Adapted wrapper storage. + * @param log - Logger instance. + * @param keys - Key builder. + * @param wrapper - Adapted wrapper storage.
*/ constructor(log: ILogger, keys: KeyBuilder, wrapper: IPluggableStorageWrapper, splitFiltersValidation?: ISplitFiltersValidation) { super(); diff --git a/src/storages/pluggable/TelemetryCachePluggable.ts b/src/storages/pluggable/TelemetryCachePluggable.ts index 995fc6b0..16ddd45f 100644 --- a/src/storages/pluggable/TelemetryCachePluggable.ts +++ b/src/storages/pluggable/TelemetryCachePluggable.ts @@ -13,9 +13,9 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { /** * Create a Telemetry cache that uses a storage wrapper. - * @param log Logger instance. - * @param keys Key builder. - * @param wrapper Adapted wrapper storage. + * @param log - Logger instance. + * @param keys - Key builder. + * @param wrapper - Adapted wrapper storage. */ constructor(private readonly log: ILogger, private readonly keys: KeyBuilderSS, private readonly wrapper: IPluggableStorageWrapper) { } diff --git a/src/storages/pluggable/UniqueKeysCachePluggable.ts b/src/storages/pluggable/UniqueKeysCachePluggable.ts index 9deddac4..f78831f9 100644 --- a/src/storages/pluggable/UniqueKeysCachePluggable.ts +++ b/src/storages/pluggable/UniqueKeysCachePluggable.ts @@ -56,7 +56,7 @@ export class UniqueKeysCachePluggable extends UniqueKeysCacheInMemory implements /** * Async consumer API, used by synchronizer. - * @param count number of items to pop from the queue. If not provided or equal 0, all items will be popped. + * @param count - number of items to pop from the queue. If not provided or equal 0, all items will be popped. */ popNRaw(count = 0): Promise { return Promise.resolve(count || this.wrapper.getItemsCount(this.key)) diff --git a/src/storages/pluggable/inMemoryWrapper.ts b/src/storages/pluggable/inMemoryWrapper.ts index ba2f10ed..8193b3e3 100644 --- a/src/storages/pluggable/inMemoryWrapper.ts +++ b/src/storages/pluggable/inMemoryWrapper.ts @@ -7,7 +7,7 @@ import { setToArray } from '../../utils/lang/sets'; * The `_cache` property is the object were items are stored. * Intended for testing purposes. * - * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves immediately. + * @param connDelay - delay in millis for `connect` resolve. If not provided, `connect` resolves immediately. */ export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWrapper & { _cache: Record>, _setConnDelay(connDelay: number): void } { diff --git a/src/storages/pluggable/index.ts b/src/storages/pluggable/index.ts index 60350d66..372eeeb4 100644 --- a/src/storages/pluggable/index.ts +++ b/src/storages/pluggable/index.ts @@ -32,7 +32,7 @@ export interface PluggableStorageOptions { /** * Validate pluggable storage factory options. * - * @param options user options + * @param options - user options * @throws Will throw an error if the options are invalid. Example: wrapper is not provided or doesn't have some methods. */ function validatePluggableStorageOptions(options: any) { diff --git a/src/storages/pluggable/wrapperAdapter.ts b/src/storages/pluggable/wrapperAdapter.ts index c47a5d8b..f56a1c90 100644 --- a/src/storages/pluggable/wrapperAdapter.ts +++ b/src/storages/pluggable/wrapperAdapter.ts @@ -26,8 +26,8 @@ export const METHODS_TO_PROMISE_WRAP: string[] = [ * Adapter of the Pluggable Storage Wrapper. * Used to handle exceptions as rejected promises, in order to simplify the error handling on storages. 
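The adapter described here boils down to wrapping every method listed in METHODS_TO_PROMISE_WRAP so that storages only ever deal with promise results, even when a user-provided wrapper throws synchronously. A rough sketch of that wrapping, with an assumed onError callback standing in for the SDK's logging:

type AnyFn = (...args: any[]) => any;

// Wraps the given methods so that synchronous throws and plain return values
// are both surfaced as promises; rejections are reported before re-throwing.
function promisifyMethods<T extends Record<string, AnyFn>>(wrapper: T, methods: string[], onError: (e: unknown) => void): T {
  const adapted: Record<string, AnyFn> = {};
  methods.forEach((method) => {
    adapted[method] = (...args: any[]) => {
      try {
        return Promise.resolve(wrapper[method](...args)).catch((e) => { onError(e); throw e; });
      } catch (e) {
        // A synchronous throw in the wrapper becomes a rejected promise.
        onError(e);
        return Promise.reject(e);
      }
    };
  });
  return adapted as unknown as T;
}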
* - * @param log logger instance - * @param wrapper storage wrapper to adapt + * @param log - logger instance + * @param wrapper - storage wrapper to adapt * @returns an adapted version of the given storage wrapper */ export function wrapperAdapter(log: ILogger, wrapper: IPluggableStorageWrapper): IPluggableStorageWrapper { diff --git a/src/storages/types.ts b/src/storages/types.ts index 8e1516e9..83f388b1 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -14,56 +14,50 @@ export interface IPluggableStorageWrapper { /** * Get the value of given `key`. * - * @function get - * @param {string} key Item to retrieve - * @returns {Promise} A promise that resolves with the element value associated with the specified `key`, + * @param key - Item to retrieve + * @returns A promise that resolves with the element value associated with the specified `key`, * or null if the key does not exist. The promise rejects if the operation fails. */ get: (key: string) => Promise /** * Add or update an item with a specified `key` and `value`. * - * @function set - * @param {string} key Item to update - * @param {string} value Value to set - * @returns {Promise} A promise that resolves if the operation success, whether the key was added or updated. + * @param key - Item to update + * @param value - Value to set + * @returns A promise that resolves if the operation success, whether the key was added or updated. * The promise rejects if the operation fails. */ set: (key: string, value: string) => Promise /** * Add or update an item with a specified `key` and `value`. * - * @function getAndSet - * @param {string} key Item to update - * @param {string} value Value to set - * @returns {Promise} A promise that resolves with the previous value associated to the given `key`, or null if not set. + * @param key - Item to update + * @param value - Value to set + * @returns A promise that resolves with the previous value associated to the given `key`, or null if not set. * The promise rejects if the operation fails. */ getAndSet: (key: string, value: string) => Promise /** * Removes the specified item by `key`. * - * @function del - * @param {string} key Item to delete - * @returns {Promise} A promise that resolves if the operation success, whether the key existed and was removed (resolves with true) or it didn't exist (resolves with false). + * @param key - Item to delete + * @returns A promise that resolves if the operation success, whether the key existed and was removed (resolves with true) or it didn't exist (resolves with false). * The promise rejects if the operation fails, for example, if there is a connection error. */ del: (key: string) => Promise /** * Returns all keys matching the given prefix. * - * @function getKeysByPrefix - * @param {string} prefix String prefix to match - * @returns {Promise} A promise that resolves with the list of keys that match the given `prefix`. + * @param prefix - String prefix to match + * @returns A promise that resolves with the list of keys that match the given `prefix`. * The promise rejects if the operation fails. */ getKeysByPrefix: (prefix: string) => Promise /** * Returns the values of all given `keys`. * - * @function getMany - * @param {string[]} keys List of keys to retrieve - * @returns {Promise<(string | null)[]>} A promise that resolves with the list of items associated with the specified list of `keys`. + * @param keys - List of keys to retrieve + * @returns A promise that resolves with the list of items associated with the specified list of `keys`. 
* For every key that does not hold a string value or does not exist, null is returned. The promise rejects if the operation fails. */ getMany: (keys: string[]) => Promise<(string | null)[]> @@ -73,20 +67,18 @@ export interface IPluggableStorageWrapper { /** * Increments the number stored at `key` by `increment`, or set it to `increment` if the value doesn't exist. * - * @function incr - * @param {string} key Key to increment - * @param {number} increment Value to increment by. Defaults to 1. - * @returns {Promise} A promise that resolves with the value of key after the increment. The promise rejects if the operation fails, + * @param key - Key to increment + * @param increment - Value to increment by. Defaults to 1. + * @returns A promise that resolves with the value of key after the increment. The promise rejects if the operation fails, * for example, if there is a connection error or the key contains a string that can not be represented as integer. */ incr: (key: string, increment?: number) => Promise /** * Decrements the number stored at `key` by `decrement`, or set it to minus `decrement` if the value doesn't exist. * - * @function decr - * @param {string} key Key to decrement - * @param {number} decrement Value to decrement by. Defaults to 1. - * @returns {Promise} A promise that resolves with the value of key after the decrement. The promise rejects if the operation fails, + * @param key - Key to decrement + * @param decrement - Value to decrement by. Defaults to 1. + * @returns A promise that resolves with the value of key after the decrement. The promise rejects if the operation fails, * for example, if there is a connection error or the key contains a string that can not be represented as integer. */ decr: (key: string, decrement?: number) => Promise @@ -96,29 +88,26 @@ export interface IPluggableStorageWrapper { /** * Inserts given items at the tail of `key` list. If `key` does not exist, an empty list is created before pushing the items. * - * @function pushItems - * @param {string} key List key - * @param {string[]} items List of items to push - * @returns {Promise} A promise that resolves if the operation success. + * @param key - List key + * @param items - List of items to push + * @returns A promise that resolves if the operation success. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a list. */ pushItems: (key: string, items: string[]) => Promise /** * Removes and returns the first `count` items from a list. If `key` does not exist, an empty list is items is returned. * - * @function popItems - * @param {string} key List key - * @param {number} count Number of items to pop - * @returns {Promise} A promise that resolves with the list of removed items from the list, or an empty array when key does not exist. + * @param key - List key + * @param count - Number of items to pop + * @returns A promise that resolves with the list of removed items from the list, or an empty array when key does not exist. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a list. */ popItems: (key: string, count: number) => Promise /** * Returns the count of items in a list, or 0 if `key` does not exist. * - * @function getItemsCount - * @param {string} key List key - * @returns {Promise} A promise that resolves with the number of items at the `key` list, or 0 when `key` does not exist. 
+ * @param key - List key + * @returns A promise that resolves with the number of items at the `key` list, or 0 when `key` does not exist. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a list. */ getItemsCount: (key: string) => Promise @@ -128,10 +117,9 @@ export interface IPluggableStorageWrapper { /** * Returns if item is a member of a set. * - * @function itemContains - * @param {string} key Set key - * @param {string} item Item value - * @returns {Promise} A promise that resolves with true boolean value if `item` is a member of the set stored at `key`, + * @param key - Set key + * @param item - Item value + * @returns A promise that resolves with true boolean value if `item` is a member of the set stored at `key`, * or false if it is not a member or `key` set does not exist. The promise rejects if the operation fails, for example, * if there is a connection error or the key holds a value that is not a set. */ @@ -140,29 +128,26 @@ export interface IPluggableStorageWrapper { * Add the specified `items` to the set stored at `key`. Those items that are already part of the set are ignored. * If key does not exist, an empty set is created before adding the items. * - * @function addItems - * @param {string} key Set key - * @param {string} items Items to add - * @returns {Promise} A promise that resolves if the operation success. + * @param key - Set key + * @param items - Items to add + * @returns A promise that resolves if the operation success. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a set. */ addItems: (key: string, items: string[]) => Promise /** * Remove the specified `items` from the set stored at `key`. Those items that are not part of the set are ignored. * - * @function removeItems - * @param {string} key Set key - * @param {string} items Items to remove - * @returns {Promise} A promise that resolves if the operation success. If key does not exist, the promise also resolves. + * @param key - Set key + * @param items - Items to remove + * @returns A promise that resolves if the operation success. If key does not exist, the promise also resolves. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a set. */ removeItems: (key: string, items: string[]) => Promise /** * Returns all the items of the `key` set. * - * @function getItems - * @param {string} key Set key - * @returns {Promise} A promise that resolves with the list of items. If key does not exist, the result is an empty list. + * @param key - Set key + * @returns A promise that resolves with the list of items. If key does not exist, the result is an empty list. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a set. */ getItems: (key: string) => Promise @@ -174,8 +159,7 @@ export interface IPluggableStorageWrapper { * It is meant for storages that requires to be connected to some database or server. Otherwise it can just return a resolved promise. * Note: will be called once on SplitFactory instantiation and once per each shared client instantiation. * - * @function connect - * @returns {Promise} A promise that resolves when the wrapper successfully connect to the underlying storage. + * @returns A promise that resolves when the wrapper successfully connect to the underlying storage. 
* The promise rejects with the corresponding error if the wrapper fails to connect. */ connect: () => Promise @@ -184,8 +168,7 @@ export interface IPluggableStorageWrapper { * It is meant for storages that requires to be closed, in order to release resources. Otherwise it can just return a resolved promise. * Note: will be called once on SplitFactory main client destroy. * - * @function disconnect - * @returns {Promise} A promise that resolves when the operation ends. + * @returns A promise that resolves when the operation ends. * The promise never rejects. */ disconnect: () => Promise diff --git a/src/sync/offline/splitsParser/splitsParserFromSettings.ts b/src/sync/offline/splitsParser/splitsParserFromSettings.ts index d0cafc9f..f242b26c 100644 --- a/src/sync/offline/splitsParser/splitsParserFromSettings.ts +++ b/src/sync/offline/splitsParser/splitsParserFromSettings.ts @@ -39,7 +39,7 @@ export function splitsParserFromSettingsFactory() { /** * - * @param settings validated object with mocked features mapping. + * @param settings - validated object with mocked features mapping. */ return function splitsParserFromSettings(settings: Pick): false | Record { const features = settings.features as SplitIO.MockedFeaturesMap || {}; diff --git a/src/sync/offline/syncManagerOffline.ts b/src/sync/offline/syncManagerOffline.ts index cd3f0bcd..31ac6dd0 100644 --- a/src/sync/offline/syncManagerOffline.ts +++ b/src/sync/offline/syncManagerOffline.ts @@ -14,7 +14,7 @@ function flush() { * Offline SyncManager factory. * Can be used for server-side API, and client-side API with or without multiple clients. * - * @param splitsParser e.g., `splitsParserFromFile`, `splitsParserFromSettings`. + * @param splitsParser - e.g., `splitsParserFromFile`, `splitsParserFromSettings`. */ export function syncManagerOfflineFactory( splitsParserFactory: () => ISplitsParser diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 165c270b..32d9f78e 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -87,12 +87,12 @@ export function mySegmentsUpdaterFactory( * MySegments updater returns a promise that resolves with a `false` boolean value if it fails to fetch mySegments or synchronize them with the storage. * Returned promise will not be rejected. * - * @param {SegmentsData | undefined} segmentsData it can be: + * @param segmentsData - it can be: * (1) the list of mySegments names to sync in the storage, * (2) an object with a segment name and action (true: add, or false: delete) to update the storage, * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage. 
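To make the pluggable wrapper contract documented above more concrete, here is a deliberately partial in-memory sketch covering only the key/value and list operations; it is not the SDK's inMemoryWrapperFactory, and the real interface also includes the integer and set methods:

// Partial, illustrative in-memory wrapper: key/value and list operations only.
function createInMemoryWrapper() {
  const keys = new Map<string, string>();
  const lists = new Map<string, string[]>();

  return {
    get(key: string): Promise<string | null> {
      return Promise.resolve(keys.has(key) ? keys.get(key)! : null);
    },
    set(key: string, value: string): Promise<boolean | void> {
      keys.set(key, value);
      return Promise.resolve(true);
    },
    del(key: string): Promise<boolean> {
      return Promise.resolve(keys.delete(key));
    },
    pushItems(key: string, items: string[]): Promise<boolean | void> {
      const list = lists.get(key) || [];
      list.push(...items);
      lists.set(key, list);
      return Promise.resolve(true);
    },
    popItems(key: string, count: number): Promise<string[]> {
      // Removes and returns the first `count` items, or an empty list if the key is absent.
      const list = lists.get(key) || [];
      return Promise.resolve(list.splice(0, count));
    },
    getItemsCount(key: string): Promise<number> {
      return Promise.resolve((lists.get(key) || []).length);
    },
    connect(): Promise<void> { return Promise.resolve(); },
    disconnect(): Promise<void> { return Promise.resolve(); },
  };
}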
- * @param {boolean | undefined} noCache true to revalidate data to fetch - * @param {boolean | undefined} till query param to bypass CDN requests + * @param noCache - true to revalidate data to fetch + * @param till - query param to bypass CDN requests */ return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean, till?: number) { return _mySegmentsUpdater(0, segmentsData, noCache, till); diff --git a/src/sync/polling/updaters/segmentChangesUpdater.ts b/src/sync/polling/updaters/segmentChangesUpdater.ts index 770f68c6..5f9db114 100644 --- a/src/sync/polling/updaters/segmentChangesUpdater.ts +++ b/src/sync/polling/updaters/segmentChangesUpdater.ts @@ -13,10 +13,10 @@ type ISegmentChangesUpdater = (fetchOnlyNew?: boolean, segmentName?: string, noC * - updates `segmentsCache` * - uses `segmentsEventEmitter` to emit events related to segments data updates * - * @param log logger instance - * @param segmentChangesFetcher fetcher of `/segmentChanges` - * @param segments segments storage, with sync or async methods - * @param readiness optional readiness manager. Not required for synchronizer or producer mode. + * @param log - logger instance + * @param segmentChangesFetcher - fetcher of `/segmentChanges` + * @param segments - segments storage, with sync or async methods + * @param readiness - optional readiness manager. Not required for synchronizer or producer mode. */ export function segmentChangesUpdaterFactory( log: ILogger, @@ -50,11 +50,11 @@ export function segmentChangesUpdaterFactory( * Thus, a false result doesn't imply that SDK_SEGMENTS_ARRIVED was not emitted. * Returned promise will not be rejected. * - * @param {boolean | undefined} fetchOnlyNew if true, only fetch the segments that not exists, i.e., which `changeNumber` is equal to -1. + * @param fetchOnlyNew - if true, only fetch the segments that do not exist, i.e., whose `changeNumber` is equal to -1. * This param is used by SplitUpdateWorker on server-side SDK, to fetch new registered segments on SPLIT_UPDATE notifications. - * @param {string | undefined} segmentName segment name to fetch. By passing `undefined` it fetches the list of segments registered at the storage - * @param {boolean | undefined} noCache true to revalidate data to fetch on a SEGMENT_UPDATE notifications. - * @param {number | undefined} till till target for the provided segmentName, for CDN bypass. + * @param segmentName - segment name to fetch. By passing `undefined` it fetches the list of segments registered at the storage + * @param noCache - true to revalidate data to fetch on SEGMENT_UPDATE notifications. + * @param till - till target for the provided segmentName, for CDN bypass.
*/ return function segmentChangesUpdater(fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number) { log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Started segments update`); diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/splitChangesUpdater.ts index f3b9e824..bf8803a2 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/splitChangesUpdater.ts @@ -51,8 +51,8 @@ interface ISplitMutations { * If there are defined filters and one feature flag doesn't match with them, its status is changed to 'ARCHIVE' to avoid storing it * If there are set filter defined, names filter is ignored * - * @param featureFlag feature flag to be evaluated - * @param filters splitFiltersValidation bySet | byName + * @param featureFlag - feature flag to be evaluated + * @param filters - splitFiltersValidation bySet | byName */ function matchFilters(featureFlag: ISplit, filters: ISplitFiltersValidation) { const { bySet: setsFilter, byName: namesFilter, byPrefix: prefixFilter } = filters.groupedFilters; @@ -100,13 +100,13 @@ export function computeSplitsMutation(entries: ISplit[], filters: ISplitFiltersV * - updates `splitsCache` * - uses `splitsEventEmitter` to emit events related to split data updates * - * @param log Logger instance - * @param splitChangesFetcher Fetcher of `/splitChanges` - * @param splits Splits storage, with sync or async methods - * @param segments Segments storage, with sync or async methods - * @param splitsEventEmitter Optional readiness manager. Not required for synchronizer or producer mode. - * @param requestTimeoutBeforeReady How long the updater will wait for the request to timeout. Default 0, i.e., never timeout. - * @param retriesOnFailureBeforeReady How many retries on `/splitChanges` we the updater do in case of failure or timeout. Default 0, i.e., no retries. + * @param log - Logger instance + * @param splitChangesFetcher - Fetcher of `/splitChanges` + * @param splits - Splits storage, with sync or async methods + * @param segments - Segments storage, with sync or async methods + * @param splitsEventEmitter - Optional readiness manager. Not required for synchronizer or producer mode. + * @param requestTimeoutBeforeReady - How long the updater will wait for the request to timeout. Default 0, i.e., never timeout. + * @param retriesOnFailureBeforeReady - How many retries on `/splitChanges` the updater will do in case of failure or timeout. Default 0, i.e., no retries. */ export function splitChangesUpdaterFactory( log: ILogger, @@ -142,14 +142,14 @@ export function splitChangesUpdaterFactory( * SplitChanges updater returns a promise that resolves with a `false` boolean value if it fails to fetch splits or synchronize them with the storage. * Returned promise will not be rejected.
* - * @param {boolean | undefined} noCache true to revalidate data to fetch - * @param {boolean | undefined} till query param to bypass CDN requests + * @param noCache - true to revalidate data to fetch + * @param till - query param to bypass CDN requests */ return function splitChangesUpdater(noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit, changeNumber: number }) { /** - * @param {number} since current changeNumber at splitsCache - * @param {number} retry current number of retry attempts + * @param since - current changeNumber at splitsCache + * @param retry - current number of retry attempts */ function _splitChangesUpdater(since: number, retry = 0): Promise { log.debug(SYNC_SPLITS_FETCH, [since]); diff --git a/src/sync/streaming/AuthClient/index.ts b/src/sync/streaming/AuthClient/index.ts index e7654c9d..8869674d 100644 --- a/src/sync/streaming/AuthClient/index.ts +++ b/src/sync/streaming/AuthClient/index.ts @@ -8,13 +8,13 @@ import { hash } from '../../../utils/murmur3/murmur3'; /** * Factory of authentication function. * - * @param fetchAuth `SplitAPI.fetchAuth` endpoint + * @param fetchAuth - `SplitAPI.fetchAuth` endpoint */ export function authenticateFactory(fetchAuth: IFetchAuth): IAuthenticate { /** * Run authentication requests to Auth Server, and returns a promise that resolves with the decoded JTW token. - * @param {string[] | undefined} userKeys set of user Keys to track membership updates. It is undefined for server-side API. + * @param userKeys - set of user Keys to track membership updates. It is undefined for server-side API. */ return function authenticate(userKeys?: string[]): Promise { return fetchAuth(userKeys) diff --git a/src/sync/streaming/SSEClient/index.ts b/src/sync/streaming/SSEClient/index.ts index 1ea8c8e8..c19c2817 100644 --- a/src/sync/streaming/SSEClient/index.ts +++ b/src/sync/streaming/SSEClient/index.ts @@ -14,8 +14,8 @@ const CONTROL_CHANNEL_REGEX = /^control_/; /** * Build metadata headers for SSE connection. * - * @param {ISettings} settings Validated settings. - * @returns {Record} Headers object + * @param settings - Validated settings. + * @returns Headers object */ function buildSSEHeaders(settings: ISettings) { const headers: Record = { @@ -45,8 +45,8 @@ export class SSEClient implements ISSEClient { /** * SSEClient constructor. * - * @param settings Validated settings. - * @param platform object containing environment-specific dependencies + * @param settings - Validated settings. + * @param platform - object containing environment-specific dependencies * @throws 'EventSource API is not available.' if EventSource is not available. 
*/ constructor(private settings: ISettings, { getEventSource, getOptions }: IPlatform) { @@ -64,9 +64,6 @@ export class SSEClient implements ISSEClient { /** * Open the connection with a given authToken - * - * @param {IAuthTokenPushEnabled} authToken - * @throws {TypeError} Will throw an error if `authToken` is undefined */ open(authToken: IAuthTokenPushEnabled) { this.close(); // it closes connection if previously opened diff --git a/src/sync/streaming/SSEHandler/NotificationKeeper.ts b/src/sync/streaming/SSEHandler/NotificationKeeper.ts index a07c6761..8b40ae6d 100644 --- a/src/sync/streaming/SSEHandler/NotificationKeeper.ts +++ b/src/sync/streaming/SSEHandler/NotificationKeeper.ts @@ -10,7 +10,7 @@ const STREAMING_EVENT_TYPES: StreamingEventType[] = [OCCUPANCY_PRI, OCCUPANCY_SE /** * Factory of notification keeper, which process OCCUPANCY and CONTROL notifications and emits the corresponding push events. * - * @param pushEmitter emitter for events related to streaming support + * @param pushEmitter - emitter for events related to streaming support */ // @TODO update logic to handle OCCUPANCY for any region and rename according to new spec (e.g.: PUSH_SUBSYSTEM_UP --> PUSH_SUBSYSTEM_UP) export function notificationKeeperFactory(pushEmitter: IPushEventEmitter, telemetryTracker: ITelemetryTracker) { diff --git a/src/sync/streaming/SSEHandler/NotificationParser.ts b/src/sync/streaming/SSEHandler/NotificationParser.ts index 749fc332..e333a9f3 100644 --- a/src/sync/streaming/SSEHandler/NotificationParser.ts +++ b/src/sync/streaming/SSEHandler/NotificationParser.ts @@ -7,8 +7,9 @@ import { INotificationMessage, INotificationError } from './types'; * HTTP errors handled by Ably (e.g., 400 due to invalid token, 401 due to expired token, 500) have the `data` property. * Other network and HTTP errors do not have this property. * - * @param {Object} error - * @throws {SyntaxError} if `error.data` is an invalid JSON string + * @param error - The error event to parse + * @returns parsed notification error + * @throws SyntaxError if `error.data` is an invalid JSON string */ export function errorParser(error: Event): INotificationError { // @ts-ignore @@ -21,10 +22,10 @@ export function errorParser(error: Event): INotificationError { * Parses the `data` JSON string of a given SSE message notifications. * Also assigns the type OCCUPANCY, if it corresponds, so that all supported messages (e.g., SPLIT_UPDATE, CONTROL) have a type. * - * @param message + * @param message - The message event to parse * @returns parsed notification message or undefined if the given event data is falsy (e.g, '' or undefined). * For example, the EventSource implementation of React-Native for iOS emits a message event with empty data for Ably keepalive comments. - * @throws {SyntaxError} if `message.data` or `JSON.parse(message.data).data` are invalid JSON strings + * @throws SyntaxError if `message.data` or `JSON.parse(message.data).data` are invalid JSON strings */ export function messageParser(message: MessageEvent): INotificationMessage | undefined { if (!message.data) return; diff --git a/src/sync/streaming/SSEHandler/index.ts b/src/sync/streaming/SSEHandler/index.ts index bbe39d0f..fbbe329c 100644 --- a/src/sync/streaming/SSEHandler/index.ts +++ b/src/sync/streaming/SSEHandler/index.ts @@ -12,8 +12,8 @@ import { ITelemetryTracker } from '../../../trackers/types'; /** * Factory for SSEHandler, which processes SSEClient messages and emits the corresponding push events. 
* - * @param log factory logger - * @param pushEmitter emitter for events related to streaming support + * @param log - factory logger + * @param pushEmitter - emitter for events related to streaming support */ export function SSEHandlerFactory(log: ILogger, pushEmitter: IPushEventEmitter, telemetryTracker: ITelemetryTracker): ISseEventHandler { diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index ae44e7c2..bafdd37d 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -87,9 +87,9 @@ export function MySegmentsUpdateWorker(log: ILogger, storage: Pick, payload?: Pick, delay?: number) { const { type, cn } = mySegmentsData; diff --git a/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts index 8ca485cd..956b744a 100644 --- a/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts @@ -84,8 +84,8 @@ export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSy /** * Invoked by NotificationProcessor on SEGMENT_UPDATE event * - * @param {number} changeNumber change number of the SEGMENT_UPDATE notification - * @param {string} segmentName segment name of the SEGMENT_UPDATE notification + * @param changeNumber - change number of the SEGMENT_UPDATE notification + * @param segmentName - segment name of the SEGMENT_UPDATE notification */ put({ changeNumber, segmentName }: ISegmentUpdateData) { if (!segments[segmentName]) segments[segmentName] = SegmentUpdateWorker(segmentName); diff --git a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts index a01ed94a..580fe9cb 100644 --- a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts @@ -69,7 +69,7 @@ export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, /** * Invoked by NotificationProcessor on SPLIT_UPDATE event * - * @param {number} changeNumber change number of the SPLIT_UPDATE notification + * @param changeNumber - change number of the SPLIT_UPDATE notification */ function put({ changeNumber, pcn }: ISplitUpdateData, _payload?: ISplit) { const currentChangeNumber = splitsCache.getChangeNumber(); @@ -94,9 +94,9 @@ export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, /** * Invoked by NotificationProcessor on SPLIT_KILL event * - * @param {number} changeNumber change number of the SPLIT_UPDATE notification - * @param {string} splitName name of split to kill - * @param {string} defaultTreatment default treatment value + * @param changeNumber - change number of the SPLIT_UPDATE notification + * @param splitName - name of split to kill + * @param defaultTreatment - default treatment value */ killSplit({ changeNumber, splitName, defaultTreatment }: ISplitKillData) { if (splitsCache.killLocally(splitName, defaultTreatment, changeNumber)) { diff --git a/src/sync/streaming/parseUtils.ts b/src/sync/streaming/parseUtils.ts index 2f2a0c49..97fde935 100644 --- a/src/sync/streaming/parseUtils.ts +++ b/src/sync/streaming/parseUtils.ts @@ -19,9 +19,9 @@ function StringToUint8Array(myString: string) { /** * Decode and decompress 'data' with 'compression' algorithm * - * @param {string} data - * @param {number} compression 1 GZIP, 2 ZLIB - * @returns {Uint8Array} + * @param data - 
base64 encoded string + * @param compression - 1 GZIP, 2 ZLIB + * @returns * @throws if data string cannot be decoded, decompressed or the provided compression value is invalid (not 1 or 2) */ function decompress(data: string, compression: Compression) { @@ -37,10 +37,10 @@ function decompress(data: string, compression: Compression) { /** * Decode, decompress and parse the provided 'data' into a KeyList object * - * @param {string} data - * @param {number} compression - * @param {boolean} avoidPrecisionLoss true as default, set it as false if dont need to avoid precission loss - * @returns {{a?: string[], r?: string[] }} + * @param data - base64 encoded string + * @param compression - 1 GZIP, 2 ZLIB + * @param avoidPrecisionLoss - true as default, set it as false if dont need to avoid precission loss + * @returns keyList * @throws if data string cannot be decoded, decompressed or parsed */ export function parseKeyList(data: string, compression: Compression, avoidPrecisionLoss = true): KeyList { @@ -54,9 +54,9 @@ export function parseKeyList(data: string, compression: Compression, avoidPrecis /** * Decode, decompress and parse the provided 'data' into a Bitmap object * - * @param {string} data - * @param {number} compression - * @returns {Uint8Array} + * @param data - base64 encoded string + * @param compression - 1 GZIP, 2 ZLIB + * @returns Bitmap * @throws if data string cannot be decoded or decompressed */ export function parseBitmap(data: string, compression: Compression) { @@ -66,9 +66,9 @@ export function parseBitmap(data: string, compression: Compression) { /** * Check if the 'bitmap' bit at 'hash64hex' position is 1 * - * @param {Uint8Array} bitmap - * @param {string} hash64hex 16-chars string, representing a number in hexa - * @returns {boolean} + * @param bitmap - Uint8Array bitmap + * @param hash64hex - 16-chars string, representing a number in hexa + * @returns whether the provided 'hash64hex' index is set in the bitmap */ export function isInBitmap(bitmap: Uint8Array, hash64hex: string) { // using the lowest 32 bits as index, to avoid losing precision when converting to number diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index 9dfda547..071e9ea3 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -14,9 +14,9 @@ import { ISdkFactoryContextSync } from '../sdkFactory/types'; * Online SyncManager factory. * Can be used for server-side API, and client-side API with or without multiple clients. * - * @param pollingManagerFactory allows to specialize the SyncManager for server-side or client-side API by passing + * @param pollingManagerFactory - allows to specialize the SyncManager for server-side or client-side API by passing * `pollingManagerSSFactory` or `pollingManagerCSFactory` respectively. - * @param pushManagerFactory optional to build a SyncManager with or without streaming support + * @param pushManagerFactory - optional to build a SyncManager with or without streaming support */ export function syncManagerOnlineFactory( pollingManagerFactory?: (params: ISdkFactoryContextSync) => IPollingManager, diff --git a/src/sync/syncTask.ts b/src/sync/syncTask.ts index 035a9c59..5f22f6c0 100644 --- a/src/sync/syncTask.ts +++ b/src/sync/syncTask.ts @@ -6,10 +6,10 @@ import { ISyncTask } from './types'; * Creates an object that handles the periodic execution of a given task via "start" and "stop" methods. * The task can be also executed by calling the "execute" method. 
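The bitmap membership check described in parseUtils above reduces to a plain bit lookup once a numeric index has been derived from the 64-bit hash. A small sketch of that lookup alone, assuming the index is already computed and one common bit ordering (the actual notification payloads may order bits differently):

// Returns whether the bit at `index` is set in the given bitmap.
function isBitSet(bitmap: Uint8Array, index: number): boolean {
  const byte = index >> 3; // which byte holds the bit
  const bit = index & 7;   // bit position inside that byte
  if (byte >= bitmap.length) return false;
  return (bitmap[byte] & (1 << bit)) !== 0;
}

// e.g., isBitSet(new Uint8Array([0b00000100]), 2) === true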
Multiple calls run sequentially to avoid race conditions (e.g., submitters executed on SDK destroy or full queue, while periodic execution is pending). * - * @param log Logger instance. - * @param task Task to execute that returns a promise that NEVER REJECTS. Otherwise, periodic execution can result in Unhandled Promise Rejections. - * @param period Period in milliseconds to execute the task. - * @param taskName Optional task name for logging. + * @param log - Logger instance. + * @param task - Task to execute that returns a promise that NEVER REJECTS. Otherwise, periodic execution can result in Unhandled Promise Rejections. + * @param period - Period in milliseconds to execute the task. + * @param taskName - Optional task name for logging. * @returns A sync task that wraps the given task. */ export function syncTaskFactory(log: ILogger, task: (...args: Input) => Promise, period: number, taskName = 'task'): ISyncTask { diff --git a/src/trackers/eventTracker.ts b/src/trackers/eventTracker.ts index 473d519c..335baf8d 100644 --- a/src/trackers/eventTracker.ts +++ b/src/trackers/eventTracker.ts @@ -11,8 +11,8 @@ import { isConsumerMode } from '../utils/settingsValidation/mode'; /** * Event tracker stores events in cache and pass them to the integrations manager if provided. * - * @param eventsCache cache to save events - * @param integrationsManager optional event handler used for integrations + * @param eventsCache - cache to save events + * @param integrationsManager - optional event handler used for integrations */ export function eventTrackerFactory( settings: ISettings, diff --git a/src/trackers/impressionsTracker.ts b/src/trackers/impressionsTracker.ts index ef58908b..485d0694 100644 --- a/src/trackers/impressionsTracker.ts +++ b/src/trackers/impressionsTracker.ts @@ -10,11 +10,11 @@ import SplitIO from '../../types/splitio'; /** * Impressions tracker stores impressions in cache and pass them to the listener and integrations manager if provided. * - * @param impressionsCache cache to save impressions - * @param metadata runtime metadata (ip, hostname and version) - * @param impressionListener optional impression listener - * @param integrationsManager optional integrations manager - * @param strategy strategy for impressions tracking. + * @param impressionsCache - cache to save impressions + * @param metadata - runtime metadata (ip, hostname and version) + * @param impressionListener - optional impression listener + * @param integrationsManager - optional integrations manager + * @param strategy - strategy for impressions tracking. */ export function impressionsTrackerFactory( settings: ISettings, diff --git a/src/trackers/strategy/strategyDebug.ts b/src/trackers/strategy/strategyDebug.ts index e8341152..21ca3a5d 100644 --- a/src/trackers/strategy/strategyDebug.ts +++ b/src/trackers/strategy/strategyDebug.ts @@ -5,7 +5,7 @@ import { IStrategy } from '../types'; /** * Debug strategy for impressions tracker. Wraps impressions to store and adds previousTime if it corresponds * - * @param impressionsObserver impression observer. Previous time (pt property) is included in impression instances + * @param impressionsObserver - impression observer. 
Previous time (pt property) is included in impression instances * @returns IStrategyResult */ export function strategyDebugFactory( diff --git a/src/trackers/strategy/strategyNone.ts b/src/trackers/strategy/strategyNone.ts index 54223de7..452ae594 100644 --- a/src/trackers/strategy/strategyNone.ts +++ b/src/trackers/strategy/strategyNone.ts @@ -5,8 +5,8 @@ import { IStrategy, IUniqueKeysTracker } from '../types'; /** * None strategy for impressions tracker. * - * @param impressionsCounter cache to save impressions count. impressions will be deduped (OPTIMIZED mode) - * @param uniqueKeysTracker unique keys tracker in charge of tracking the unique keys per split. + * @param impressionsCounter - cache to save impressions count. impressions will be deduped (OPTIMIZED mode) + * @param uniqueKeysTracker - unique keys tracker in charge of tracking the unique keys per split. * @returns IStrategyResult */ export function strategyNoneFactory( diff --git a/src/trackers/strategy/strategyOptimized.ts b/src/trackers/strategy/strategyOptimized.ts index d94e2ab2..9fe61af1 100644 --- a/src/trackers/strategy/strategyOptimized.ts +++ b/src/trackers/strategy/strategyOptimized.ts @@ -7,8 +7,8 @@ import { IStrategy } from '../types'; /** * Optimized strategy for impressions tracker. Wraps impressions to store and adds previousTime if it corresponds * - * @param impressionsObserver impression observer. previous time (pt property) is included in impression instances - * @param impressionsCounter cache to save impressions count. impressions will be deduped (OPTIMIZED mode) + * @param impressionsObserver - impression observer. previous time (pt property) is included in impression instances + * @param impressionsCounter - cache to save impressions count. impressions will be deduped (OPTIMIZED mode) * @returns IStrategyResult */ export function strategyOptimizedFactory( diff --git a/src/trackers/uniqueKeysTracker.ts b/src/trackers/uniqueKeysTracker.ts index 9faa57fc..f8de517c 100644 --- a/src/trackers/uniqueKeysTracker.ts +++ b/src/trackers/uniqueKeysTracker.ts @@ -14,9 +14,9 @@ const noopFilterAdapter = { * Unique Keys Tracker will be in charge of checking if the MTK was already sent to the BE in the last period * or schedule to be sent; if not it will be added in an internal cache and sent in the next post. * - * @param log Logger instance - * @param uniqueKeysCache cache to save unique keys - * @param filterAdapter filter adapter + * @param log - Logger instance + * @param uniqueKeysCache - cache to save unique keys + * @param filterAdapter - filter adapter */ export function uniqueKeysTrackerFactory( log: ILogger, diff --git a/src/types.ts b/src/types.ts index 2e368ca6..bdb0933c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -33,8 +33,8 @@ export interface IStatusInterface extends SplitIO.IStatusInterface { export interface IBasicClient extends SplitIO.IBasicClient { /** * Flush data - * @function flush - * @return {Promise} + * + * @returns A promise that is resolved when the flush is completed. */ flush(): Promise; // Whether the client implements the client-side API, i.e, with bound key, (true), or the server-side API (false/undefined). @@ -49,8 +49,8 @@ export interface PreloadedData { /** * Timestamp of the last moment the data was synchronized with Split servers. * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. - * @TODO configurable expiration time policy? */ + // @TODO configurable expiration time policy? 
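The `lastUpdated` expiration policy stated here is just a timestamp comparison against a 10-day window; an assumed helper (not part of the SDK) makes the rule explicit:

// 10 days, matching the expiration policy described for `lastUpdated`.
const EXPIRATION_MS = 10 * 24 * 60 * 60 * 1000;

function isPreloadedDataExpired(lastUpdated: number, now: number = Date.now()): boolean {
  return now - lastUpdated > EXPIRATION_MS;
}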
lastUpdated: number; /** * Change number of the preloaded data. @@ -65,7 +65,6 @@ export interface PreloadedData { }; /** * Optional map of user keys to their list of segments. - * @TODO remove when releasing first version */ mySegmentsData?: { [key: string]: string[]; diff --git a/src/utils/Backoff.ts b/src/utils/Backoff.ts index 95ff5c34..519b4890 100644 --- a/src/utils/Backoff.ts +++ b/src/utils/Backoff.ts @@ -15,10 +15,6 @@ export class Backoff { /** * Schedule function calls with exponential backoff - * - * @param {function} cb - * @param {number} baseMillis - * @param {number} maxMillis */ constructor(cb: (...args: any[]) => any, baseMillis?: number, maxMillis?: number) { this.baseMillis = Backoff.__TEST__BASE_MILLIS || baseMillis || Backoff.DEFAULT_BASE_MILLIS; diff --git a/src/utils/Semver.ts b/src/utils/Semver.ts index f94f3fa5..7f11d9ae 100644 --- a/src/utils/Semver.ts +++ b/src/utils/Semver.ts @@ -84,7 +84,7 @@ export class Semver { /** * Precedence comparision between 2 Semver objects. * - * @return `0` if `this === toCompare`, `-1` if `this < toCompare`, and `1` if `this > toCompare` + * @returns `0` if `this === toCompare`, `-1` if `this < toCompare`, and `1` if `this > toCompare` */ public compare(toCompare: Semver): number { if (this.version === toCompare.version) return 0; diff --git a/src/utils/base64/index.ts b/src/utils/base64/index.ts index 32077037..3fa878de 100644 --- a/src/utils/base64/index.ts +++ b/src/utils/base64/index.ts @@ -22,7 +22,7 @@ const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/= /** * Decode a given string value in Base64 format * - * @param {string} input value to decode + * @param input - value to decode */ export function decodeFromBase64(input: string): string { const str = (String(input)).replace(/[=]+$/, ''); // #31: ExtendScript bad parse of /= @@ -48,7 +48,7 @@ export function decodeFromBase64(input: string): string { /** * Encode a given string value to Base64 format. * - * @param {string} input value to encode + * @param input - value to encode */ export function encodeToBase64(input: string): string { const str = String(input); diff --git a/src/utils/decompress/index.ts b/src/utils/decompress/index.ts index e0f5c2cd..fdbead14 100644 --- a/src/utils/decompress/index.ts +++ b/src/utils/decompress/index.ts @@ -409,8 +409,8 @@ export const algorithms = (function iifeDecompress() { return { /** * Expands GZIP data - * @param data The data to decompress - * @param out Where to write the data. GZIP already encodes the output size, so providing this doesn't save memory. + * @param data - The data to decompress + * @param out - Where to write the data. GZIP already encodes the output size, so providing this doesn't save memory. * @returns The decompressed version of the data */ gunzipSync(data: Uint8Array, out?: Uint8Array): Uint8Array { @@ -418,8 +418,8 @@ export const algorithms = (function iifeDecompress() { }, /** * Expands Zlib data - * @param data The data to decompress - * @param out Where to write the data. Saves memory if you know the decompressed size and provide an output buffer of that length. + * @param data - The data to decompress + * @param out - Where to write the data. Saves memory if you know the decompressed size and provide an output buffer of that length. 
* @returns The decompressed version of the data */ unzlibSync(data: Uint8Array, out?: Uint8Array): Uint8Array { diff --git a/src/utils/lang/binarySearch.ts b/src/utils/lang/binarySearch.ts index 7ff83fa0..2c38e0da 100644 --- a/src/utils/lang/binarySearch.ts +++ b/src/utils/lang/binarySearch.ts @@ -1,8 +1,8 @@ /** * Searches the index of the specified `value` inside an ordered array of `items` using the binary search algorithm. * - * @param items the array to be searched - * @param value the value to be searched for + * @param items - the array to be searched + * @param value - the value to be searched for * @returns integer number between 0 and `items.length`. This value is the index of the search value, * if it is contained in the array, or the index at which the value should be inserted to keep the array ordered. */ diff --git a/src/utils/murmur3/murmur3_128.ts b/src/utils/murmur3/murmur3_128.ts index a9929324..954ba591 100644 --- a/src/utils/murmur3/murmur3_128.ts +++ b/src/utils/murmur3/murmur3_128.ts @@ -266,7 +266,7 @@ function hash128x64(key?: string, seed?: number) { /** * x64 version of Murmur3 for 128bits. * - * @param {string} str + * @param str - The string to hash. */ export function hash128(str: string, seed?: number): string { return hash128x64(UTF16ToUTF8(str), (seed as number) >>> 0); diff --git a/src/utils/murmur3/murmur3_128_x86.ts b/src/utils/murmur3/murmur3_128_x86.ts index a05ad3dc..a76d3374 100644 --- a/src/utils/murmur3/murmur3_128_x86.ts +++ b/src/utils/murmur3/murmur3_128_x86.ts @@ -181,7 +181,7 @@ function hash128x86(key?: string, seed?: number): string { * x86 version of Murmur3 for 128bits. * Used by hashImpression128 because in JS it is more efficient than the x64 version, no matter the underlying OS/CPU arch. * - * @param {string} str + * @param str - The string to hash. */ export function hash128(str: string, seed?: number): string { return hash128x86(UTF16ToUTF8(str), (seed as number) >>> 0); diff --git a/src/utils/murmur3/murmur3_64.ts b/src/utils/murmur3/murmur3_64.ts index aba873c7..92fdc76b 100644 --- a/src/utils/murmur3/murmur3_64.ts +++ b/src/utils/murmur3/murmur3_64.ts @@ -25,7 +25,7 @@ function hex2dec(s: string): string { /** * Gets the higher 64 bits of the x64 version of Murmur3 for 128bits, as decimal and hexadecimal number strings. * Used for MySegments channel V2 notifications. - * @param {string} str + * @param str - The string to hash */ export function hash64(str: string): Hash64 { const hex = hash128(str).slice(0, 16); diff --git a/src/utils/murmur3/utfx.ts b/src/utils/murmur3/utfx.ts index fd9125f4..c53322e3 100644 --- a/src/utils/murmur3/utfx.ts +++ b/src/utils/murmur3/utfx.ts @@ -14,10 +14,9 @@ export interface utfx { /** * Encodes UTF8 code points to UTF8 bytes. - * @param {(!function():number|null) | number} src Code points source, either as a function returning the next code point + * @param src - Code points source, either as a function returning the next code point * respectively `null` if there are no more code points left or a single numeric code point. - * @param {!function(number)} dst Bytes destination as a function successively called with the next byte - * @expose + * @param dst - Bytes destination as a function successively called with the next byte */ function encodeUTF8(src: (() => number | null) | number, dst: (cp: number) => void): void { var cp = null; @@ -38,11 +37,10 @@ function encodeUTF8(src: (() => number | null) | number, dst: (cp: number) => vo /** * Converts UTF16 characters to UTF8 code points. 
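The binarySearch contract documented above can be shown with a short sketch. The concrete return value follows the documented behavior (index of the value when present, ordered-insertion index otherwise), and the import path assumes a caller next to src/utils/lang/binarySearch.ts:

```
import { binarySearch } from './utils/lang/binarySearch';

const items = [1, 3, 5, 7];

binarySearch(items, 5); // 2: the value is contained at index 2
// For a value not contained in the array, the documented contract is to return
// the index at which it should be inserted to keep the array ordered.
```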
- * @param {!function():number|null} src Characters source as a function returning the next char code respectively + * @param src - Characters source as a function returning the next char code respectively * `null` if there are no more characters left. - * @param {!function(number)} dst Code points destination as a function successively called with each converted code + * @param dst - Code points destination as a function successively called with each converted code * point. - * @expose */ function UTF16toUTF8(src: () => number | null, dst: (cp: number) => void): void { var c1, c2 = null; @@ -65,10 +63,9 @@ function UTF16toUTF8(src: () => number | null, dst: (cp: number) => void): void /** * Converts and encodes UTF16 characters to UTF8 bytes. - * @param {!function():number|null} src Characters source as a function returning the next char code respectively `null` + * @param src - Characters source as a function returning the next char code respectively `null` * if there are no more characters left. - * @param {!function(number)} dst Bytes destination as a function successively called with the next byte. - * @expose + * @param dst - Bytes destination as a function successively called with the next byte. */ export function encodeUTF16toUTF8(src: () => number | null, dst: (...args: number[]) => string | undefined): void { UTF16toUTF8(src, function (cp) { @@ -78,18 +75,15 @@ export function encodeUTF16toUTF8(src: () => number | null, dst: (...args: numbe /** * String.fromCharCode reference for compile time renaming. - * @type {!function(...[number]):string} - * @inner */ var stringFromCharCode = String.fromCharCode; /** * Creates a source function for a string. - * @param {string} s String to read from - * @returns {!function():number|null} Source function returning the next char code respectively `null` if there are + * @param s - String to read from + * @returns Source function returning the next char code respectively `null` if there are * no more characters left. - * @throws {TypeError} If the argument is invalid - * @expose + * @throws If the argument is invalid */ export function stringSource(s: string): () => number | null { if (typeof s !== 'string') @@ -101,9 +95,8 @@ export function stringSource(s: string): () => number | null { /** * Creates a destination function for a string. - * @returns {function(number=):undefined|string} Destination function successively called with the next char code. + * @returns Destination function successively called with the next char code. * Returns the final string when called without arguments. - * @expose */ export function stringDestination(): (...args: number[]) => string | undefined { const cs: number[] = [], ps: string[] = []; return function () { diff --git a/src/utils/promise/wrapper.ts b/src/utils/promise/wrapper.ts index d0100fd6..62266457 100644 --- a/src/utils/promise/wrapper.ts +++ b/src/utils/promise/wrapper.ts @@ -11,8 +11,8 @@ * - If the wrapped promise is rejected when using native async/await syntax, the `defaultOnRejected` handler is invoked * and neither the catch block nor the remaining try block are executed. * - * @param customPromise promise to wrap - * @param defaultOnRejected default onRejected function + * @param customPromise - promise to wrap + * @param defaultOnRejected - default onRejected function * @returns a promise that doesn't need to be handled for rejection (except when using async/await syntax) and * includes a method named `hasOnFulfilled` that returns true if the promise has attached an onFulfilled handler. 
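A sketch of how the promise wrapper described above might be consumed. The exported name (`promiseWrapper` here) is not visible in this hunk and is an assumption, as is the exact shape of the returned object beyond the documented `hasOnFulfilled` method:

```
// Assumption: the wrapper is a named export of src/utils/promise/wrapper.ts.
import { promiseWrapper } from './utils/promise/wrapper';

const wrapped = promiseWrapper(
  Promise.reject(new Error('network error')), // promise to wrap
  () => { /* default onRejected: used when no rejection handler was attached */ }
);

// Per the documented contract, the wrapped promise doesn't need to be handled for rejection,
// and hasOnFulfilled() reports whether an onFulfilled handler has been attached.
wrapped.hasOnFulfilled(); // false
```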
*/ diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index 98883911..ce1d1772 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ -96,8 +96,8 @@ function fromSecondsToMillis(n: number) { * Validates the given config and use it to build a settings object. * NOTE: it doesn't validate the SDK Key. Call `validateApiKey` or `validateAndTrackApiKey` for that after settings validation. * - * @param config user defined configuration - * @param validationParams defaults and fields validators used to validate and creates a settings object from a given config + * @param config - user defined configuration + * @param validationParams - defaults and fields validators used to validate and creates a settings object from a given config */ export function settingsValidation(config: unknown, validationParams: ISettingsValidationParams) { diff --git a/src/utils/settingsValidation/integrations/common.ts b/src/utils/settingsValidation/integrations/common.ts index 5ead4509..2ecb24b6 100644 --- a/src/utils/settingsValidation/integrations/common.ts +++ b/src/utils/settingsValidation/integrations/common.ts @@ -4,11 +4,11 @@ import { ILogger } from '../../../logger/types'; /** * This function validates `settings.integrations` object * - * @param {any} settings config object provided by the user to initialize the sdk - * @param {function} integrationValidator filter used to remove invalid integration items - * @param {string} extraWarning optional string used to better describe why an item might be invalid + * @param settings - config object provided by the user to initialize the sdk + * @param integrationValidator - filter used to remove invalid integration items + * @param extraWarning - optional string used to better describe why an item might be invalid * - * @returns {Array} array of valid integration items. The array might be empty if `settings` object does not have valid integrations. + * @returns array of valid integration items. The array might be empty if `settings` object does not have valid integrations. */ export function validateIntegrations(settings: { log: ILogger, integrations?: any }, integrationValidator: (integrationItem: any) => boolean, extraWarning?: string) { const { integrations, log } = settings; diff --git a/src/utils/settingsValidation/integrations/configurable.ts b/src/utils/settingsValidation/integrations/configurable.ts index fb24e35b..17283d3b 100644 --- a/src/utils/settingsValidation/integrations/configurable.ts +++ b/src/utils/settingsValidation/integrations/configurable.ts @@ -5,10 +5,10 @@ import { ILogger } from '../../../logger/types'; /** * This function validates `settings.integrations` object that consists of a list of configuration items, used by the isomorphic JS SDK. * - * @param {any} settings config object provided by the user to initialize the sdk - * @param {Array} validIntegrationTypes list of integration types to filter from `settings.integrations` + * @param settings - config object provided by the user to initialize the sdk + * @param validIntegrationTypes - list of integration types to filter from `settings.integrations` * - * @returns {Array} array of valid integration items. The array might be empty if `settings` object does not have valid integrations. + * @returns array of valid integration items. The array might be empty if `settings` object does not have valid integrations. 
*/ export function validateConfigurableIntegrations(settings: { log: ILogger, integrations?: any }, validIntegrationTypes: string[] = []) { diff --git a/src/utils/settingsValidation/integrations/pluggable.ts b/src/utils/settingsValidation/integrations/pluggable.ts index b4a96ee9..f60cfce0 100644 --- a/src/utils/settingsValidation/integrations/pluggable.ts +++ b/src/utils/settingsValidation/integrations/pluggable.ts @@ -5,9 +5,9 @@ import { ILogger } from '../../../logger/types'; /** * This function validates `settings.integrations` object that consists of a list of pluggable integration factories. * - * @param {any} settings config object provided by the user to initialize the sdk + * @param settings - config object provided by the user to initialize the sdk * - * @returns {Array} array of valid integration factories. The array might be empty if `settings` object does not have valid integrations. + * @returns array of valid integration factories. The array might be empty if `settings` object does not have valid integrations. */ export function validatePluggableIntegrations(settings: { log: ILogger, integrations?: any }): ISettings['integrations'] { diff --git a/src/utils/settingsValidation/logger/builtinLogger.ts b/src/utils/settingsValidation/logger/builtinLogger.ts index d64f32a3..4f099c7c 100644 --- a/src/utils/settingsValidation/logger/builtinLogger.ts +++ b/src/utils/settingsValidation/logger/builtinLogger.ts @@ -37,7 +37,7 @@ if (/^(enabled?|on)/i.test(initialState)) { /** * Validates the `debug` property at config and use it to set the log level. * - * @param settings user config object, with an optional `debug` property of type boolean or string log level. + * @param settings - user config object, with an optional `debug` property of type boolean or string log level. * @returns a logger instance with the log level at `settings.debug`. If `settings.debug` is invalid or not provided, `initialLogLevel` is used. */ export function validateLogger(settings: { debug: unknown }): ILogger { diff --git a/src/utils/settingsValidation/logger/commons.ts b/src/utils/settingsValidation/logger/commons.ts index 3958baaf..8c11cbbb 100644 --- a/src/utils/settingsValidation/logger/commons.ts +++ b/src/utils/settingsValidation/logger/commons.ts @@ -6,7 +6,7 @@ import SplitIO from '../../../../types/splitio'; * Returns the LogLevel for the given debugValue or undefined if it is invalid, * i.e., if the debugValue is not a boolean or LogLevel string. * - * @param debugValue debug value at config + * @param debugValue - debug value at config * @returns LogLevel of the given debugValue or undefined if the provided value is invalid */ export function getLogLevel(debugValue: unknown): SplitIO.LogLevel | undefined { diff --git a/src/utils/settingsValidation/logger/pluggableLogger.ts b/src/utils/settingsValidation/logger/pluggableLogger.ts index 3b872877..063134c9 100644 --- a/src/utils/settingsValidation/logger/pluggableLogger.ts +++ b/src/utils/settingsValidation/logger/pluggableLogger.ts @@ -13,7 +13,7 @@ let initialLogLevel = LogLevels.NONE; /** * Validates the `debug` property at config and use it to set the logger. * - * @param settings user config object, with an optional `debug` property of type boolean, string log level or a Logger object. + * @param settings - user config object, with an optional `debug` property of type boolean, string log level or a Logger object. 
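To make the `debug` validation above concrete, a small sketch of the built-in logger validator; the import path is taken from this hunk, and the specific level that `true` maps to is not shown here, so it is left unstated:

```
import { validateLogger } from './utils/settingsValidation/logger/builtinLogger';

const logWarn = validateLogger({ debug: 'WARN' });   // logger set to the WARN log level
const logBool = validateLogger({ debug: true });     // booleans are also valid values
const logBad = validateLogger({ debug: 'verbose' }); // invalid: falls back to the initial log level
```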
* @returns a logger instance, that might be: the provided logger at `settings.debug`, or one with the given `debug` log level, * or one with NONE log level if `debug` is not defined or invalid. */ diff --git a/src/utils/settingsValidation/splitFilters.ts b/src/utils/settingsValidation/splitFilters.ts index 21474322..cea3117f 100644 --- a/src/utils/settingsValidation/splitFilters.ts +++ b/src/utils/settingsValidation/splitFilters.ts @@ -40,9 +40,9 @@ function validateFilterType(maybeFilterType: any): maybeFilterType is SplitIO.Sp /** * Validate, deduplicate and sort a given list of filter values. * - * @param {string} type filter type string used for log messages - * @param {string[]} values list of values to validate, deduplicate and sort - * @param {number} maxLength + * @param type - filter type string used for log messages + * @param values - list of values to validate, deduplicate and sort + * @param maxLength - max length allowed for the list of values * @returns list of valid, unique and alphabetically sorted non-empty strings. The list is empty if `values` param is not a non-empty array or all its values are invalid. * * @throws Error if the sanitized list exceeds the length indicated by `maxLength` @@ -75,7 +75,7 @@ function validateSplitFilter(log: ILogger, type: SplitIO.SplitFilterType, values * - '&prefixes=': if only `byName` filter is undefined * - '&names=&prefixes=': if no one is undefined * - * @param {Object} groupedFilters object of filters. Each filter must be a list of valid, unique and ordered string values. + * @param groupedFilters - object of filters. Each filter must be a list of valid, unique and ordered string values. * @returns null or string with the `split filter query` component of the URL. */ function queryStringBuilder(groupedFilters: Record) { @@ -90,15 +90,12 @@ function queryStringBuilder(groupedFilters: Record; } /** - * @typedef {Object} EventConsts - * @property {string} SDK_READY The ready event. - * @property {string} SDK_READY_FROM_CACHE The ready event when fired with cached data. - * @property {string} SDK_READY_TIMED_OUT The timeout event. - * @property {string} SDK_UPDATE The update event. + * Event constants. */ type EventConsts = { + /** + * The ready event. + */ SDK_READY: 'init::ready'; + /** + * The ready event when fired with cached data. + */ SDK_READY_FROM_CACHE: 'init::cache-ready'; + /** + * The timeout event. + */ SDK_READY_TIMED_OUT: 'init::timeout'; + /** + * The update event. + */ SDK_UPDATE: 'state::update'; }; /** * SDK Modes. - * @typedef {string} SDKMode */ type SDKMode = 'standalone' | 'localhost' | 'consumer' | 'consumer_partial'; /** * Storage types. - * @typedef {string} StorageType */ type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; /** * Settings interface. This is a representation of the settings the SDK expose, that's why * most of it's props are readonly. Only features should be rewritten when localhost mode is active. - * @interface ISettings */ interface ISettings { readonly core: { @@ -88,7 +93,7 @@ declare namespace SplitIO { impressionsRefreshRate: number; impressionsQueueSize: number; /** - * @deprecated + * @deprecated Use `telemetryRefreshRate` instead. */ metricsRefreshRate?: number; telemetryRefreshRate: number; @@ -141,30 +146,22 @@ declare namespace SplitIO { } /** * Log levels. - * @typedef {string} LogLevel */ type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'; /** * Logger API - * @interface ILoggerAPI */ interface ILoggerAPI { /** * Enables SDK logging to the console. 
- * @function enable - * @returns {void} */ enable(): void; /** * Disables SDK logging. - * @function disable - * @returns {void} */ disable(): void; /** * Sets a log level for the SDK logs. - * @function setLogLevel - * @returns {void} */ setLogLevel(logLevel: LogLevel): void; /** @@ -176,7 +173,6 @@ declare namespace SplitIO { } /** * User consent API - * @interface IUserConsentAPI */ interface IUserConsentAPI { /** @@ -186,16 +182,14 @@ declare namespace SplitIO { * * NOTE: calling this method updates the user consent at a factory level, affecting all clients of the same factory. * - * @function setStatus - * @param {boolean} userConsent The user consent status, true for 'GRANTED' and false for 'DECLINED'. - * @returns {boolean} Whether the provided param is a valid value (i.e., a boolean value) or not. + * @param userConsent - The user consent status, true for 'GRANTED' and false for 'DECLINED'. + * @returns Whether the provided param is a valid value (i.e., a boolean value) or not. */ setStatus(userConsent: boolean): boolean; /** * Gets the user consent status. * - * @function getStatus - * @returns {ConsentStatus} The user consent status. + * @returns The user consent status. */ getStatus(): SplitIO.ConsentStatus; /** @@ -207,13 +201,10 @@ declare namespace SplitIO { } /** * Common API for entities that expose status handlers. - * @interface IStatusInterface - * @extends EventEmitter */ interface IStatusInterface extends EventEmitter { /** * Constant object containing the SDK events for you to use. - * @property {EventConsts} Event */ Event: EventConsts; /** @@ -231,15 +222,12 @@ declare namespace SplitIO { * } * ``` * - * @function ready - * @returns {Promise} + * @returns A promise that resolves once the SDK is ready or rejects if the SDK has timedout. */ ready(): Promise; } /** * Common definitions between clients for different environments interface. - * @interface IBasicClient - * @extends IStatusInterface */ interface IBasicClient extends IStatusInterface { /** @@ -249,99 +237,94 @@ declare namespace SplitIO { * In 'standalone' mode, it also stops the synchronization of feature flag definitions with the backend. * In 'consumer' and 'partial consumer' modes, this method also disconnects the SDK from the Pluggable storage. * - * @function destroy - * @returns {Promise} A promise that resolves once the client is destroyed. + * @returns A promise that resolves once the client is destroyed. */ destroy(): Promise; } /** * Common definitions between SDK instances for different environments interface. - * @interface IBasicSDK */ interface IBasicSDK { /** * Current settings of the SDK instance. - * @property settings */ settings: ISettings; /** * Logger API. - * @property Logger */ Logger: ILoggerAPI; /** * Destroys all the clients created by this factory. - * @function destroy - * @returns {Promise} + * + * @returns A promise that resolves once all clients are destroyed. */ destroy(): Promise; } /** * Feature flag treatment value, returned by getTreatment. - * @typedef {string} Treatment */ type Treatment = string; /** * Feature flag treatment promise that resolves to actual treatment value. - * @typedef {Promise} AsyncTreatment */ type AsyncTreatment = Promise; /** * An object with the treatments for a bulk of feature flags, returned by getTreatments. 
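Pulling the factory-level APIs above together, a hedged usage sketch; the `factory` and `client` instances are assumed to have been created elsewhere with the client-side API:

```
declare const factory: SplitIO.ISDK;
declare const client: SplitIO.IClient;

factory.Logger.setLogLevel('WARN');  // 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'
factory.Logger.disable();            // disables SDK logging

const accepted = factory.UserConsent.setStatus(false); // false means 'DECLINED'; returns whether the param was a boolean
const status = factory.UserConsent.getStatus();        // 'GRANTED' | 'DECLINED' | 'UNKNOWN'

client.on(client.Event.SDK_READY, () => { /* safe to evaluate feature flags */ });
client.ready().then(() => { /* resolves once ready, rejects on timeout */ });
client.destroy().then(() => { /* resolves once the client is destroyed */ });
```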
For example: + * ``` * { * feature1: 'on', - * feature2: 'off + * feature2: 'off' * } - * @typedef {Object.} Treatments + * ``` */ type Treatments = { [featureName: string]: Treatment; }; /** * Feature flag treatments promise that resolves to the actual SplitIO.Treatments object. - * @typedef {Promise} AsyncTreatments */ type AsyncTreatments = Promise; /** * Feature flag evaluation result with treatment and configuration, returned by getTreatmentWithConfig. - * @typedef {Object} TreatmentWithConfig - * @property {string} treatment The treatment string - * @property {string | null} config The stringified version of the JSON config defined for that treatment, null if there is no config for the resulting treatment. */ type TreatmentWithConfig = { + /** + * The treatment string. + */ treatment: string; + /** + * The stringified version of the JSON config defined for that treatment, null if there is no config for the resulting treatment. + */ config: string | null; }; /** * Feature flag treatment promise that resolves to actual SplitIO.TreatmentWithConfig object. - * @typedef {Promise} AsyncTreatmentWithConfig */ type AsyncTreatmentWithConfig = Promise; /** * An object with the treatments with configs for a bulk of feature flags, returned by getTreatmentsWithConfig. * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. For example: + * ``` * { * feature1: { treatment: 'on', config: null } * feature2: { treatment: 'off', config: '{"bannerText":"Click here."}' } * } - * @typedef {Object.} Treatments + * ``` */ type TreatmentsWithConfig = { [featureName: string]: TreatmentWithConfig; }; /** * Feature flag treatments promise that resolves to the actual SplitIO.TreatmentsWithConfig object. - * @typedef {Promise} AsyncTreatmentsWithConfig */ type AsyncTreatmentsWithConfig = Promise; /** * Possible Split SDK events. - * @typedef {string} Event */ type Event = 'init::timeout' | 'init::ready' | 'init::cache-ready' | 'state::update'; /** * Attributes should be on object with values of type string, boolean, number (dates should be sent as millis since epoch) or array of strings or numbers. - * @typedef {Object.} Attributes + * * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#attribute-syntax} */ type Attributes = { @@ -349,20 +332,18 @@ declare namespace SplitIO { }; /** * Type of an attribute value - * @typedef {string | number | boolean | Array} AttributeType */ type AttributeType = string | number | boolean | Array; /** * Properties should be an object with values of type string, number, boolean or null. Size limit of ~31kb. - * @typedef {Object.} Properties - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#track + * + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#track} */ type Properties = { [propertyName: string]: string | number | boolean | null; }; /** * The SplitKey object format. - * @typedef {Object.} SplitKeyObject */ type SplitKeyObject = { matchingKey: string; @@ -370,24 +351,20 @@ declare namespace SplitIO { }; /** * The customer identifier. Could be a SplitKeyObject or a string. - * @typedef {SplitKeyObject|string} SplitKey */ type SplitKey = SplitKeyObject | string; /** * Path to file with mocked features (for node). - * @typedef {string} MockedFeaturesFilePath */ type MockedFeaturesFilePath = string; /** * Object with mocked features mapping for client-side (e.g., Browser or React Native). 
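The treatment types above map onto the evaluation calls they mention (getTreatment, getTreatments, getTreatmentWithConfig). A brief sketch, assuming a client-side client so only feature flag names are passed:

```
declare const client: SplitIO.IClient;

const treatment: SplitIO.Treatment = client.getTreatment('feature1'); // e.g. 'on'

const withConfig: SplitIO.TreatmentWithConfig = client.getTreatmentWithConfig('feature2');
// withConfig.config is the stringified JSON defined for the treatment, or null if none

const treatments: SplitIO.Treatments = client.getTreatments(['feature1', 'feature2']);
// e.g. { feature1: 'on', feature2: 'off' }
```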
We need to specify the featureName as key, and the mocked treatment as value. - * @typedef {Object} MockedFeaturesMap */ type MockedFeaturesMap = { [featureName: string]: string | TreatmentWithConfig; }; /** * Impression DTO generated by the SDK when processing evaluations. - * @typedef {Object} ImpressionDTO */ type ImpressionDTO = { feature: string; @@ -402,7 +379,6 @@ declare namespace SplitIO { /** * Object with information about an impression. It contains the generated impression DTO as well as * complementary information around where and how it was generated in that way. - * @typedef {Object} ImpressionData */ type ImpressionData = { impression: ImpressionDTO; @@ -413,56 +389,46 @@ declare namespace SplitIO { }; /** * Data corresponding to one feature flag view. - * @typedef {Object} SplitView */ type SplitView = { /** * The name of the feature flag. - * @property {string} name */ name: string; /** * The traffic type of the feature flag. - * @property {string} trafficType */ trafficType: string; /** * Whether the feature flag is killed or not. - * @property {boolean} killed */ killed: boolean; /** * The list of treatments available for the feature flag. - * @property {Array} treatments */ treatments: Array; /** * Current change number of the feature flag. - * @property {number} changeNumber */ changeNumber: number; /** * Map of configurations per treatment. * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. - * @property {Object.} configs */ configs: { [treatmentName: string]: string; }; /** * List of sets of the feature flag. - * @property {string[]} sets */ sets: string[]; /** * The default treatment of the feature flag. - * @property {string} defaultTreatment */ defaultTreatment: string; }; /** * A promise that resolves to a feature flag view or null if the feature flag is not found. - * @typedef {Promise} SplitViewAsync */ type SplitViewAsync = Promise; /** @@ -471,17 +437,14 @@ declare namespace SplitIO { type SplitViews = Array; /** * A promise that resolves to an SplitIO.SplitViews array. - * @typedef {Promise} SplitViewsAsync */ type SplitViewsAsync = Promise; /** * An array of feature flag names. - * @typedef {Array} SplitNames */ type SplitNames = Array; /** * A promise that resolves to an array of feature flag names. - * @typedef {Promise} SplitNamesAsync */ type SplitNamesAsync = Promise; /** @@ -503,8 +466,8 @@ declare namespace SplitIO { type InLocalStorageOptions = { /** * Optional prefix to prevent any kind of data collision when having multiple factories using the same storage type. - * @property {string} prefix - * @default 'SPLITIO' + * + * @defaultValue `'SPLITIO'` */ prefix?: string; } @@ -527,29 +490,25 @@ declare namespace SplitIO { type PluggableStorageOptions = { /** * Optional prefix to prevent any kind of data collision when having multiple factories using the same storage wrapper. - * @property {string} prefix - * @default 'SPLITIO' + * + * @defaultValue `'SPLITIO'` */ prefix?: string; /** * Storage wrapper. - * @property {Object} wrapper */ wrapper: Object; } /** * Synchronous storage valid types for NodeJS. - * @typedef {string} NodeSyncStorage */ type NodeSyncStorage = 'MEMORY'; /** * Asynchronous storages valid types for NodeJS. - * @typedef {string} NodeAsyncStorage */ type NodeAsyncStorage = 'REDIS'; /** * Storage valid types for the browser. 
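As a concrete example of the mocked-features map in client-side localhost mode (the 'localhost' authorizationKey convention and the `features` property are documented further down in this file), a configuration sketch:

```
const localhostConfig: SplitIO.IBrowserSettings = {
  core: {
    authorizationKey: 'localhost', // enables localhost (offline) mode
    key: 'user_123'                // SplitKey: a string or a { matchingKey, bucketingKey } object
  },
  features: {
    feature_1: 'on',                                                        // plain treatment
    feature_2: { treatment: 'off', config: '{"bannerText":"Click here."}' } // TreatmentWithConfig
  }
};
```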
- * @typedef {string} BrowserStorage */ type BrowserStorage = 'MEMORY' | 'LOCALSTORAGE'; /** @@ -563,7 +522,7 @@ declare namespace SplitIO { /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. - * @interface IImpressionListener + * * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#listener} */ interface IImpressionListener { @@ -585,17 +544,19 @@ declare namespace SplitIO { } /** * A pair of user key and it's trafficType, required for tracking valid Split events. - * @typedef {Object} Identity - * @property {string} key The user key. - * @property {string} trafficType The key traffic type. */ type Identity = { + /** + * The user key. + */ key: string; + /** + * The key traffic type. + */ trafficType: string; }; /** * Object with information about a Split event. - * @typedef {Object} EventData */ type EventData = { eventTypeId: string; @@ -607,51 +568,64 @@ declare namespace SplitIO { }; /** * Object representing the data sent by Split (events and impressions). - * @typedef {Object} IntegrationData - * @property {string} type The type of Split data, either 'IMPRESSION' or 'EVENT'. - * @property {ImpressionData | EventData} payload The data instance itself. */ - type IntegrationData = { type: 'IMPRESSION', payload: SplitIO.ImpressionData } | { type: 'EVENT', payload: SplitIO.EventData }; + type IntegrationData = { + /** + * The type of Split data. + */ + type: 'IMPRESSION', + /** + * The impression data. + */ + payload: SplitIO.ImpressionData + } | { + /** + * The type of Split data. + */ + type: 'EVENT', + /** + * The event data. + */ + payload: SplitIO.EventData + }; /** * Available URL settings for the SDKs. */ type UrlSettings = { /** * String property to override the base URL where the SDK will get rollout plan related data, like feature flags and segments definitions. - * @property {string} sdk - * @default 'https://sdk.split.io/api' + * + * @defaultValue `'https://sdk.split.io/api'` */ sdk?: string; /** * String property to override the base URL where the SDK will post event-related information like impressions. - * @property {string} events - * @default 'https://events.split.io/api' + * + * @defaultValue `'https://events.split.io/api'` */ events?: string; /** * String property to override the base URL where the SDK will get authorization tokens to be used with functionality that requires it, like streaming. - * @property {string} auth - * @default 'https://auth.split.io/api' + * + * @defaultValue `'https://auth.split.io/api'` */ auth?: string; /** * String property to override the base URL where the SDK will connect to receive streaming updates. - * @property {string} streaming - * @default 'https://streaming.split.io' + * + * @defaultValue `'https://streaming.split.io'` */ streaming?: string; /** * String property to override the base URL where the SDK will post telemetry data. - * @property {string} telemetry - * @default 'https://telemetry.split.io/api' + * + * @defaultValue `'https://telemetry.split.io/api'` */ telemetry?: string; }; /** * SplitFilter type. - * - * @typedef {string} SplitFilterType */ type SplitFilterType = 'bySet' | 'byName' | 'byPrefix'; /** @@ -660,51 +634,40 @@ declare namespace SplitIO { interface SplitFilter { /** * Type of the filter. - * - * @property {SplitFilterType} type */ type: SplitFilterType; /** * List of values: feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. 
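A sketch of a custom impression listener wired through the `impressionListener` setting documented just below. Only the `logImpression` method is required, and the fields read here are limited to what the ImpressionData and ImpressionDTO types above expose:

```
const impressionListener: SplitIO.IImpressionListener = {
  logImpression(impressionData: SplitIO.ImpressionData) {
    // impressionData.impression is the generated ImpressionDTO
    console.log('evaluated feature flag:', impressionData.impression.feature);
  }
};

// Passed at factory configuration time alongside the rest of the settings:
// { core: { ... }, impressionListener }
```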
- * - * @property {string[]} values */ values: string[]; } /** * ImpressionsMode type - * @typedef {string} ImpressionsMode */ type ImpressionsMode = 'OPTIMIZED' | 'DEBUG' | 'NONE'; /** * User consent status. - * @typedef {string} ConsentStatus */ type ConsentStatus = 'GRANTED' | 'DECLINED' | 'UNKNOWN'; /** - * Logger - * Its interface details are not part of the public API. It shouldn't be used directly. - * @interface ILogger + * Logger. Its interface details are not part of the public API. It shouldn't be used directly. */ interface ILogger { setLogLevel(logLevel: LogLevel): void; } /** * Common settings properties. - * - * @interface ISharedSettings */ interface ISharedSettings { /** * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, * which will check for the logImpression method. - * @property {IImpressionListener} impressionListener - * @default undefined + * + * @defaultValue `undefined` */ impressionListener?: SplitIO.IImpressionListener; /** * SDK synchronization settings. - * @property {Object} sync */ sync?: { /** @@ -712,10 +675,11 @@ declare namespace SplitIO { * This configuration is only meaningful when the SDK is working in "standalone" mode. * * Example: - * `splitFilter: [ - * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' - * ]` - * @property {SplitIO.SplitFilter[]} splitFilters + * ``` + * splitFilter: [ + * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' + * ] + * ``` */ splitFilters?: SplitIO.SplitFilter[]; /** @@ -725,8 +689,7 @@ declare namespace SplitIO { * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. * - * @property {string} impressionsMode - * @default 'OPTIMIZED' + * @defaultValue `'OPTIMIZED'` */ impressionsMode?: SplitIO.ImpressionsMode; /** @@ -739,25 +702,25 @@ declare namespace SplitIO { * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * Or provide keys with different cases since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` * * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, - * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. + * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and they will be ignored. * - * @property getHeaderOverrides - * @default undefined + * @defaultValue `undefined` * - * @param context - The context for the request. 
- * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. + * @param context - The context for the request, which contains the `headers` property object representing the current headers in the request. + * @returns An object representing a set of headers to be merged with the current headers. * * @example + * ``` * const getHeaderOverrides = (context) => { * return { * 'Authorization': context.headers['Authorization'] + ', other-value', * 'custom-header': 'custom-value' * }; * }; + * ``` */ getHeaderOverrides?: (context: { headers: Record }) => Record; }; @@ -765,35 +728,29 @@ declare namespace SplitIO { /** * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone and partial consumer modes. * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls */ urls?: UrlSettings; } /** * Common settings properties for SDKs with synchronous API (standalone and localhost modes). - * - * @interface ISyncSharedSettings - * @extends ISharedSettings */ interface ISyncSharedSettings extends ISharedSettings { /** * The SDK mode. When using the default in-memory storage or `InLocalStorage` as storage, the only possible value is "standalone", which is the default. * For "localhost" mode, use "localhost" as authorizationKey. * - * @property {'standalone'} mode - * @default 'standalone' + * @defaultValue `'standalone'` */ mode?: 'standalone'; /** * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. - * @property {boolean} streamingEnabled - * @default true + * + * @defaultValue `true` */ streamingEnabled?: boolean; /** * SDK synchronization settings. - * @property {Object} sync */ sync?: ISharedSettings['sync'] & { /** @@ -802,16 +759,13 @@ declare namespace SplitIO { * When `true` a running SDK will process rollout plan updates performed on the UI (default). * When false it'll just fetch all data upon init. * - * @property {boolean} enabled - * @default true + * @defaultValue `true` */ enabled?: boolean; }; } /** * Common settings properties for SDKs with pluggable configuration. - * - * @interface IPluggableSettings */ interface IPluggableSettings { /** @@ -825,23 +779,19 @@ declare namespace SplitIO { * config.debug = 'WARN' * config.debug = ErrorLogger() * ``` - * @property {boolean | LogLevel | ILogger} debug - * @default false + * + * @defaultValue `false` */ debug?: boolean | LogLevel | SplitIO.ILogger; /** * Defines an optional list of factory functions used to instantiate SDK integrations. * * NOTE: at the moment there are not integrations to plug in. - * - * @property {Object} integrations */ integrations?: IntegrationFactory[]; } /** * Common settings properties for SDKs without pluggable configuration. - * - * @interface INonPluggableSettings */ interface INonPluggableSettings { /** @@ -852,69 +802,63 @@ declare namespace SplitIO { * config.debug = true * config.debug = 'WARN' * ``` - * @property {boolean | LogLevel} debug - * @default false + * + * @defaultValue `false` */ debug?: boolean | LogLevel; } /** * Common settings properties for server-side SDKs. 
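A configuration sketch combining the synchronization flags documented above; every value shown overrides a documented default (streamingEnabled: true, sync.enabled: true, impressionsMode: 'OPTIMIZED'):

```
const syncSettings: SplitIO.ISyncSharedSettings = {
  streamingEnabled: false,   // fall back to polling instead of streaming
  sync: {
    enabled: false,          // fetch rollout data once at init, do not poll for updates
    impressionsMode: 'NONE', // send unique keys instead of full impressions
    splitFilters: [
      { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }
    ]
  }
};
```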
- * - * @interface IServerSideSharedSettings */ interface IServerSideSharedSettings { /** * SDK Core settings for NodeJS. - * @property {Object} core */ core: { /** * Your SDK key. + * * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey */ authorizationKey: string; /** * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true + * + * @defaultValue `true` */ labelsEnabled?: boolean; /** * Disable machine IP and Name from being sent to Split backend. - * @property {boolean} IPAddressesEnabled - * @default true + * + * @defaultValue `true` */ IPAddressesEnabled?: boolean; }; } /** * Common settings properties for client-side SDKs. - * - * @interface IClientSideSharedSettings */ interface IClientSideSharedSettings { /** * SDK Core settings for client-side. - * @property {Object} core */ core: { /** * Your SDK key. + * * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey */ authorizationKey: string; /** * Customer identifier. Whatever this means to you. + * * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @property {SplitKey} key */ key: SplitKey; /** * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true + * + * @defaultValue `true` */ labelsEnabled?: boolean; }; @@ -925,15 +869,12 @@ declare namespace SplitIO { * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. * - * @typedef {string} userConsent - * @default 'GRANTED' + * @defaultValue `'GRANTED'` */ userConsent?: ConsentStatus; } /** * Common settings properties for client-side standalone SDKs. - * - * @interface IClientSideSyncSharedSettings */ interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISyncSharedSettings { /** @@ -943,104 +884,101 @@ declare namespace SplitIO { features?: MockedFeaturesMap; /** * SDK Startup settings. - * @property {Object} startup */ startup?: { /** * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 10 + * + * @defaultValue `10` */ readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * @property {number} requestTimeoutBeforeReady - * @default 5 + * + * @defaultValue `5` */ requestTimeoutBeforeReady?: number; /** * How many quick retries we will do while starting up the SDK. - * @property {number} retriesOnFailureBeforeReady - * @default 1 + * + * @defaultValue `1` */ retriesOnFailureBeforeReady?: number; /** * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, * to better control on browsers or mobile. This number defines that window before the first events push. * - * @property {number} eventsFirstPushWindow - * @default 10 + * @defaultValue `10` */ eventsFirstPushWindow?: number; }; /** * SDK scheduler settings. 
- * @property {Object} scheduler */ scheduler?: { /** * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. - * @property {number} featuresRefreshRate - * @default 60 + * + * @defaultValue `60` */ featuresRefreshRate?: number; /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * @property {number} impressionsRefreshRate - * @default 60 + * + * @defaultValue `60` */ impressionsRefreshRate?: number; /** * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. - * @property {number} impressionsQueueSize - * @default 30000 + * + * @defaultValue `30000` */ impressionsQueueSize?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} metricsRefreshRate - * @default 120 + * + * @defaultValue `120` * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. */ metricsRefreshRate?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} telemetryRefreshRate - * @default 3600 + * + * @defaultValue `3600` */ telemetryRefreshRate?: number; /** * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * @property {number} segmentsRefreshRate - * @default 60 + * + * @defaultValue `60` */ segmentsRefreshRate?: number; /** * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * @property {number} eventsPushRate - * @default 60 + * + * @defaultValue `60` */ eventsPushRate?: number; /** * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. - * @property {number} eventsQueueSize - * @default 500 + * + * @defaultValue `500` */ eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} - * @property {number} offlineRefreshRate - * @default 15 + * + * @defaultValue `15` */ offlineRefreshRate?: number; /** * When using streaming mode, seconds to wait before re attempting to connect for push notifications. * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * @property {number} pushRetryBackoffBase - * @default 1 + * + * @defaultValue `1` */ pushRetryBackoffBase?: number; }; @@ -1048,8 +986,6 @@ declare namespace SplitIO { /** * Settings interface for Browser SDK instances created with client-side API and synchronous storage (e.g., in-memory or local storage). 
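To illustrate the scheduler block just documented, a sketch overriding a few of the client-side refresh rates; all values are in seconds, omitted properties keep their documented defaults, and the SDK key is a placeholder:

```
const config: SplitIO.IBrowserSettings = {
  core: { authorizationKey: 'YOUR_SDK_KEY', key: 'user_123' },
  scheduler: {
    featuresRefreshRate: 30,     // default 60: poll feature flag definitions more often
    segmentsRefreshRate: 60,     // default 60
    impressionsRefreshRate: 120, // default 60: post impressions less often
    eventsPushRate: 60,          // default 60
    telemetryRefreshRate: 3600   // default 3600
  }
};
```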
* - * @interface IClientSideSettings - * @extends IClientSideSyncSharedSettings * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} */ interface IClientSideSettings extends IClientSideSyncSharedSettings, IPluggableSettings { @@ -1065,15 +1001,12 @@ declare namespace SplitIO { * storage: InLocalStorage() * }) * ``` - * @property {Object} storage */ storage?: StorageSyncFactory; } /** * Settings interface for React Native SDK instances, with client-side API and synchronous storage. * - * @interface IReactNativeSettings - * @extends IClientSideSettings * @see {@link https://help.split.io/hc/en-us/articles/4406066357901-React-Native-SDK#configuration} */ interface IReactNativeSettings extends IClientSideSettings { } @@ -1081,8 +1014,6 @@ declare namespace SplitIO { * Settings interface for Browser SDK instances created with client-side API and asynchronous storage (e.g., serverless environments with a persistent storage). * If your storage is synchronous (by default we use memory, which is sync) use SplitIO.IClientSideSettings instead. * - * @interface IClientSideAsyncSettings - * @extends IClientSideSharedSettings * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage} */ interface IClientSideAsyncSettings extends IClientSideSharedSettings, ISharedSettings, IPluggableSettings { @@ -1090,8 +1021,6 @@ declare namespace SplitIO { * The SDK mode. When using `PluggableStorage` as storage, the possible values are "consumer" and "consumer_partial". * * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage} - * - * @property {'consumer' | 'consumer_partial'} mode */ mode: 'consumer' | 'consumer_partial'; /** @@ -1104,18 +1033,16 @@ declare namespace SplitIO { * storage: PluggableStorage({ wrapper: SomeWrapper }) * }) * ``` - * @property {Object} storage */ storage: StorageAsyncFactory; /** * SDK Startup settings. - * @property {Object} startup */ startup?: { /** * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 5 + * + * @defaultValue `5` */ readyTimeout?: number; /** @@ -1123,22 +1050,21 @@ declare namespace SplitIO { * to better control on browsers or mobile. This number defines that window before the first events push. * * NOTE: this param is ignored in 'consumer' mode. - * @property {number} eventsFirstPushWindow - * @default 10 + * + * @defaultValue `10` */ eventsFirstPushWindow?: number; }; /** * SDK scheduler settings. - * @property {Object} scheduler */ scheduler?: { /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. * * NOTE: this param is ignored in 'consumer' mode. - * @property {number} impressionsRefreshRate - * @default 60 + * + * @defaultValue `60` */ impressionsRefreshRate?: number; /** @@ -1146,24 +1072,24 @@ declare namespace SplitIO { * If you use a 0 here, the queue will have no maximum size. * * NOTE: this param is ignored in 'consumer' mode. - * @property {number} impressionsQueueSize - * @default 30000 + * + * @defaultValue `30000` */ impressionsQueueSize?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. * * NOTE: this param is ignored in 'consumer' mode. 
- * @property {number} telemetryRefreshRate - * @default 3600 + * + * @defaultValue `3600` */ telemetryRefreshRate?: number; /** * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. * * NOTE: this param is ignored in 'consumer' mode. - * @property {number} eventsPushRate - * @default 60 + * + * @defaultValue `60` */ eventsPushRate?: number; /** @@ -1171,8 +1097,8 @@ declare namespace SplitIO { * If you use a 0 here, the queue will have no maximum size. * * NOTE: this param is ignored in 'consumer' mode. - * @property {number} eventsQueueSize - * @default 500 + * + * @defaultValue `500` */ eventsQueueSize?: number; }; @@ -1180,27 +1106,24 @@ declare namespace SplitIO { /** * Settings interface for JavaScript SDK instances created on the browser, with client-side API and synchronous storage (e.g., in-memory or local storage). * - * @interface IBrowserSettings - * @extends IClientSideSyncSharedSettings * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} */ interface IBrowserSettings extends IClientSideSyncSharedSettings, INonPluggableSettings { /** * Defines which kind of storage we can instantiate on the browser. * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. - * @property {Object} storage */ storage?: { /** * Storage type to be instantiated by the SDK. - * @property {BrowserStorage} type - * @default 'MEMORY' + * + * @defaultValue `'MEMORY'` */ type?: BrowserStorage; /** * Optional prefix to prevent any kind of data collision between SDK versions. - * @property {string} prefix - * @default 'SPLITIO' + * + * @defaultValue `'SPLITIO'` */ prefix?: string; }; @@ -1209,137 +1132,131 @@ declare namespace SplitIO { * Settings interface for JavaScript SDK instances created on NodeJS, with server-side API and synchronous in-memory storage. * If your storage is asynchronous (Redis for example) use SplitIO.INodeAsyncSettings instead. * - * @interface INodeSettings - * @extends IServerSideSharedSettings * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ interface INodeSettings extends IServerSideSharedSettings, ISyncSharedSettings, INonPluggableSettings { /** * SDK Startup settings for NodeJS. - * @property {Object} startup */ startup?: { /** * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 15 + * + * @defaultValue `15` */ readyTimeout?: number; /** * Time to wait for a request before the SDK is ready. If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * @property {number} requestTimeoutBeforeReady - * @default 15 + * + * @defaultValue `15` */ requestTimeoutBeforeReady?: number; /** * How many quick retries we will do while starting up the SDK. - * @property {number} retriesOnFailureBeforeReady - * @default 1 + * + * @defaultValue `1` */ retriesOnFailureBeforeReady?: number; /** * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, * to better control on browsers. This number defines that window before the first events push. * - * @property {number} eventsFirstPushWindow - * @default 0 + * @defaultValue `0` */ eventsFirstPushWindow?: number; }; /** * SDK scheduler settings. - * @property {Object} scheduler */ scheduler?: { /** * The SDK polls Split servers for changes to feature flag definitions. 
This parameter controls this polling period in seconds. - * @property {number} featuresRefreshRate - * @default 60 + * + * @defaultValue `60` */ featuresRefreshRate?: number; /** * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * @property {number} impressionsRefreshRate - * @default 300 + * + * @defaultValue `300` */ impressionsRefreshRate?: number; /** * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. - * @property {number} impressionsQueueSize - * @default 30000 + * + * @defaultValue `30000` */ impressionsQueueSize?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} metricsRefreshRate - * @default 120 + * + * @defaultValue `120` * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. */ metricsRefreshRate?: number; /** * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} telemetryRefreshRate - * @default 3600 + * + * @defaultValue `3600` */ telemetryRefreshRate?: number; /** * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * @property {number} segmentsRefreshRate - * @default 60 + * + * @defaultValue `60` */ segmentsRefreshRate?: number; /** * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * @property {number} eventsPushRate - * @default 60 + * + * @defaultValue `60` */ eventsPushRate?: number; /** * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. * If you use a 0 here, the queue will have no maximum size. - * @property {number} eventsQueueSize - * @default 500 + * + * @defaultValue `500` */ eventsQueueSize?: number; /** * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {number} offlineRefreshRate - * @default 15 + * + * @defaultValue `15` */ offlineRefreshRate?: number; /** * When using streaming mode, seconds to wait before re attempting to connect for push notifications. * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * @property {number} pushRetryBackoffBase - * @default 1 + * + * @defaultValue `1` */ pushRetryBackoffBase?: number; }; /** * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {MockedFeaturesFilePath} features - * @default '$HOME/.split' + * + * @defaultValue `'$HOME/.split'` */ features?: SplitIO.MockedFeaturesFilePath; /** * Defines which kind of storage we can instantiate on NodeJS for 'standalone' mode. * The only possible storage type is 'MEMORY', which is the default. - * @property {Object} storage */ storage?: { /** * Synchronous storage type to be instantiated by the SDK. 
- * @property {NodeSyncStorage} type - * @default 'MEMORY' + * + * @defaultValue `'MEMORY'` */ type?: NodeSyncStorage; /** * Optional prefix to prevent any kind of data collision between SDK versions. - * @property {string} prefix - * @default 'SPLITIO' + * + * @defaultValue `'SPLITIO'` */ prefix?: string; }; @@ -1354,22 +1271,22 @@ declare namespace SplitIO { * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * Or provide keys with different cases since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` * - * @property getHeaderOverrides - * @default undefined + * @defaultValue `undefined` * - * @param context - The context for the request. - * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. + * @param context - The context for the request, which contains the `headers` property object representing the current headers in the request. + * @returns An object representing a set of headers to be merged with the current headers. * * @example + * ``` * const getHeaderOverrides = (context) => { * return { * 'Authorization': context.headers['Authorization'] + ', other-value', * 'custom-header': 'custom-value' * }; * }; + * ``` */ getHeaderOverrides?: (context: { headers: Record }) => Record; /** @@ -1394,8 +1311,7 @@ declare namespace SplitIO { * * @see {@link https://nodejs.org/api/https.html#class-httpsagent} * - * @property {http.Agent | https.Agent} agent - * @default undefined + * @defaultValue `undefined` */ agent?: RequestOptions['agent']; }; @@ -1405,8 +1321,6 @@ declare namespace SplitIO { * Settings interface for JavaScript SDK instances created on NodeJS, with asynchronous storage like Redis. * If your storage is synchronous (by default we use memory, which is sync) use SplitIO.INodeSettings instead. * - * @interface INodeAsyncSettings - * @extends IServerSideSharedSettings * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ interface INodeAsyncSettings extends IServerSideSharedSettings, ISharedSettings, INonPluggableSettings { @@ -1414,36 +1328,30 @@ declare namespace SplitIO { * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. * * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#state-sharing-redis-integration} - * - * @property {'consumer'} mode */ mode: 'consumer'; /** * SDK Startup settings for NodeJS. - * @property {Object} startup */ startup?: { /** * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 15 + * + * @defaultValue `15` */ readyTimeout?: number; }; /** * Defines which kind of async storage we can instantiate on NodeJS for 'consumer' mode. * The only possible storage type is 'REDIS'. - * @property {Object} storage */ storage: { /** * 'REDIS' storage type to be instantiated by the SDK. - * @property {NodeAsyncStorage} type */ type: NodeAsyncStorage; /** * Options to be passed to the Redis storage. 
Use it with storage type: 'REDIS'. - * @property {Object} options */ options?: { /** @@ -1455,59 +1363,57 @@ declare namespace SplitIO { * url: '127.0.0.1:6379' * url: 'redis://:authpassword@127.0.0.1:6379/0' * ``` - * @property {string=} url */ url?: string; /** * Redis host. - * @property {string=} host - * @default 'localhost' + * + * @defaultValue `'localhost'` */ host?: string; /** * Redis port. - * @property {number=} port - * @default 6379 + * + * @defaultValue `6379` */ port?: number; /** * Redis database to be used. - * @property {number=} db - * @default 0 + * + * @defaultValue `0` */ db?: number; /** * Redis password. Don't define if no password is used. - * @property {string=} pass - * @default undefined + * + * @defaultValue `undefined` */ pass?: string; /** * The milliseconds before a timeout occurs during the initial connection to the Redis server. - * @property {number=} connectionTimeout - * @default 10000 + * + * @defaultValue `10000` */ connectionTimeout?: number; /** * The milliseconds before Redis commands are timeout by the SDK. * Method calls that involve Redis commands, like `client.getTreatment` or `client.track` calls, are resolved when the commands success or timeout. - * @property {number=} operationTimeout - * @default 5000 + * + * @defaultValue `5000` */ operationTimeout?: number; /** * TLS configuration for Redis connection. * @see {@link https://www.npmjs.com/package/ioredis#tls-options } * - * @property {Object=} tls - * @default undefined + * @defaultValue `undefined` */ tls?: RedisOptions['tls']; }; /** * Optional prefix to prevent any kind of data collision between SDK versions. - * @property {string} prefix - * @default 'SPLITIO' + * + * @defaultValue `'SPLITIO'` */ prefix?: string; }; @@ -1515,204 +1421,181 @@ declare namespace SplitIO { /** * This represents the interface for the SDK instance with synchronous storage and client-side API, * i.e., where client instances have a bound user key. - * @interface ISDK - * @extends IBasicSDK */ interface ISDK extends IBasicSDK { /** * Returns the default client instance of the SDK, associated with the key provided on settings. - * @function client - * @returns {IClient} The client instance. + * + * @returns The client instance. */ client(): IClient; /** * Returns a shared client of the SDK, associated with the given key. - * @function client - * @param {SplitKey} key The key for the new client instance. - * @returns {IClient} The client instance. + * @param key - The key for the new client instance. + * @returns The client instance. */ client(key: SplitKey): IClient; /** * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. + * + * @returns The manager instance. */ manager(): IManager; /** * User consent API. - * @property UserConsent */ UserConsent: IUserConsentAPI; } /** * This represents the interface for the SDK instance with asynchronous storage and client-side API, * i.e., where client instances have a bound user key. - * @interface IAsyncSDK - * @extends IBasicSDK */ interface IAsyncSDK extends IBasicSDK { /** * Returns the default client instance of the SDK, associated with the key provided on settings. - * @function client - * @returns {IAsyncClient} The asynchronous client instance. + * + * @returns The asynchronous client instance. */ client(): IAsyncClient; /** * Returns a shared client of the SDK, associated with the given key. - * @function client - * @param {SplitKey} key The key for the new client instance. 
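The Redis options listed earlier in this hunk map onto a 'consumer' mode factory. A hedged sketch, again assuming the `SplitFactory` entry point of the NodeJS SDK; the `url` value is the same illustrative form used in the typings, and the timeouts are the documented defaults:

```
import { SplitFactory } from '@splitsoftware/splitio';

// 'consumer' mode is required when the storage type is 'REDIS'.
const factory = SplitFactory({
  core: { authorizationKey: 'YOUR_SDK_KEY' },
  mode: 'consumer',
  storage: {
    type: 'REDIS',
    options: {
      url: 'redis://:authpassword@127.0.0.1:6379/0',
      connectionTimeout: 10000,
      operationTimeout: 5000
    },
    prefix: 'SPLITIO'
  }
});
```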
- * @returns {IAsyncClient} The asynchronous client instance. + * + * @param key - The key for the new client instance. + * @returns The asynchronous client instance. */ client(key: SplitKey): IAsyncClient; /** * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. + * + * @returns The manager instance. */ manager(): IAsyncManager; /** * User consent API. - * @property UserConsent */ UserConsent: IUserConsentAPI; } /** * This represents the interface for the SDK instance for server-side with synchronous storage. - * @interface INodeSDK - * @extends IBasicSDK */ interface INodeSDK extends IBasicSDK { /** * Returns the default client instance of the SDK. - * @function client - * @returns {INodeClient} The client instance. + * + * @returns The client instance. */ client(): INodeClient; /** * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. + * + * @returns The manager instance. */ manager(): IManager; } /** * This represents the interface for the SDK instance for server-side with asynchronous storage. - * @interface INodeAsyncSDK - * @extends IBasicSDK */ interface INodeAsyncSDK extends IBasicSDK { /** * Returns the default client instance of the SDK. - * @function client - * @returns {INodeAsyncClient} The asynchronous client instance. + * + * @returns The asynchronous client instance. */ client(): INodeAsyncClient; /** * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. + * + * @returns The manager instance. */ manager(): IAsyncManager; } /** * This represents the interface for the Client instance on server-side, where the user key is not bound to the instance and must be provided on each method call. * This interface is available in NodeJS, or when importing the 'server' sub-package of JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). - * - * @interface INodeClient - * @extends IBasicClient */ interface INodeClient extends IBasicClient { /** * Returns a Treatment value, which is the treatment string for the given feature. * - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. + * @param key - The string key representing the consumer. + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The treatment string. */ getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment; /** * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. * - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
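The client-side and server-side SDK interfaces above differ mainly in whether `client()` takes a key. A type-only sketch against the client-side `ISDK` shape, assuming a factory instance created elsewhere with a bound key in its settings (the shared key shown is illustrative):

```
declare const factory: SplitIO.ISDK;

const defaultClient = factory.client();                   // bound to the key from settings
const sharedClient = factory.client('another-user-key');  // shared client for a second key
const manager = factory.manager();                        // read-only view of feature flags
```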
- * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the + * @param key - The string key representing the consumer. + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The TreatmentWithConfig, the object containing the treatment string and the * configuration stringified JSON (or null if there was no config for that treatment). */ getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the given features. * - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. + * @param key - The string key representing the consumer. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The treatments object map. */ getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. * - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + * @param key - The string key representing the consumer. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects */ getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. * - * @function getTreatmentsByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the Treatment objects + * @param key - The string key representing the consumer. + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. 
+ * @returns The map with all the Treatment objects */ getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. * - * @function getTreatmentsWithConfigByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + * @param key - The string key representing the consumer. + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects */ getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. * - * @function getTreatmentsByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the Treatment objects + * @param key - The string key representing the consumer. + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the Treatment objects */ getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. * - * @function getTreatmentsWithConfigByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + * @param key - The string key representing the consumer. + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects */ getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface. * - * @function track - * @param {SplitKey} key - The key that identifies the entity related to this event. - * @param {string} trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @param {string} eventType - The event type corresponding to this event. 
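To make the server-side evaluation methods concrete, a sketch against the `INodeClient` interface declared above; the key, feature flag names, flag set, traffic type and event properties are all illustrative:

```
declare const client: SplitIO.INodeClient;

// The key is passed explicitly on every call in the server-side API.
const treatment = client.getTreatment('user-123', 'new_checkout', { plan: 'premium' });
const byName = client.getTreatments('user-123', ['new_checkout', 'dark_mode']);
const byFlagSet = client.getTreatmentsByFlagSet('user-123', 'checkout_flags');

// track() returns whether the event was queued successfully.
const queued = client.track('user-123', 'user', 'purchase', 49.9, { currency: 'USD' });
```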
- * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} Whether the event was added to the queue successfully or not. + * @param key - The key that identifies the entity related to this event. + * @param trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @param eventType - The event type corresponding to this event. + * @param value - The value of this event. + * @param properties - The properties of this event. Values can be string, number, boolean or null. + * @returns Whether the event was added to the queue successfully or not. */ track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; } @@ -1720,101 +1603,89 @@ declare namespace SplitIO { * This represents the interface for the Client instance on server-side with asynchronous storage, like REDIS. * User key is not bound to the instance and must be provided on each method call, which returns a promise. * This interface is available in NodeJS, or when importing the 'server' sub-package in JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). - * - * @interface INodeAsyncClient - * @extends IBasicClient */ interface INodeAsyncClient extends IBasicClient { /** * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. * - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. + * @param key - The string key representing the consumer. + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatment promise that resolves to the treatment string. */ getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment; /** * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. * - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. + * @param key - The string key representing the consumer. + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. 
*/ getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig; /** * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. * - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. + * @param key - The string key representing the consumer. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. */ getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. * - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. + * @param key - The string key representing the consumer. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. */ getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. * - * @function getTreatmentsByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. + * @param key - The string key representing the consumer. + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. */ getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. * - * @function getTreatmentsWithConfigByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. 
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. + * @param key - The string key representing the consumer. + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. */ getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. * - * @function getTreatmentsByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. + * @param key - The string key representing the consumer. + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. */ getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. * - * @function getTreatmentsWithConfigByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. + * @param key - The string key representing the consumer. + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. */ getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). * - * @function track - * @param {SplitKey} key - The key that identifies the entity related to this event. - * @param {string} trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. 
- * @returns {Promise} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. + * @param key - The key that identifies the entity related to this event. + * @param trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @param eventType - The event type corresponding to this event. + * @param value - The value of this event. + * @param properties - The properties of this event. Values can be string, number, boolean or null. + * @returns A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. */ track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; } @@ -1822,275 +1693,248 @@ declare namespace SplitIO { /** * Add an attribute to client's in-memory attributes storage. * - * @param {string} attributeName Attribute name - * @param {AttributeType} attributeValue Attribute value - * @returns {boolean} true if the attribute was stored and false otherwise + * @param attributeName - Attribute name + * @param attributeValue - Attribute value + * @returns true if the attribute was stored and false otherwise */ setAttribute(attributeName: string, attributeValue: AttributeType): boolean; /** * Returns the attribute with the given name. * - * @param {string} attributeName Attribute name - * @returns {AttributeType} Attribute with the given name + * @param attributeName - Attribute name + * @returns Attribute with the given name */ getAttribute(attributeName: string): AttributeType; /** * Removes from client's in-memory attributes storage the attribute with the given name. * - * @param {string} attributeName - * @returns {boolean} true if attribute was removed and false otherwise + * @param attributeName - Attribute name + * @returns true if attribute was removed and false otherwise */ removeAttribute(attributeName: string): boolean; /** * Add to client's in-memory attributes storage the attributes in 'attributes'. * - * @param {Attributes} attributes Object with attributes to store + * @param attributes - Object with attributes to store * @returns true if attributes were stored an false otherwise */ setAttributes(attributes: Attributes): boolean; /** * Return all the attributes stored in client's in-memory attributes storage. * - * @returns {Attributes} returns all the stored attributes + * @returns returns all the stored attributes */ getAttributes(): Attributes; /** * Remove all the stored attributes in the client's in-memory attribute storage. * - * @returns {boolean} true if all attribute were removed and false otherwise + * @returns true if all attribute were removed and false otherwise */ clearAttributes(): boolean; } /** * This represents the interface for the Client instance on client-side, where the user key is bound to the instance on creation and does not need to be provided on each method call. - * - * @interface IClient - * @extends IClientWithAttributes */ interface IClient extends IClientWithAttributes { /** * Returns a Treatment value, which is the treatment string for the given feature. * - * @function getTreatment - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. 
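The attribute methods above operate on the client's in-memory attributes storage. A sketch against the client-side `IClient` interface, with illustrative attribute names and values:

```
declare const client: SplitIO.IClient;

client.setAttribute('plan', 'premium');
client.setAttributes({ age: 31, beta: true });

const plan = client.getAttribute('plan');  // 'premium'
const all = client.getAttributes();        // { plan: 'premium', age: 31, beta: true }

client.removeAttribute('beta');
client.clearAttributes();
```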
+ * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The treatment string. */ getTreatment(featureFlagName: string, attributes?: Attributes): Treatment; /** * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. * - * @function getTreatmentWithConfig - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentWithConfig} The map containing the treatment and the configuration stringified JSON (or null if there was no config for that treatment). + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map containing the treatment and the configuration stringified JSON (or null if there was no config for that treatment). */ getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the given features. * - * @function getTreatments - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The treatments object map. */ getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. * - * @function getTreatmentsWithConfig - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects */ getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. * - * @function getTreatmentsByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the Treatments objects + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. 
+ * @returns The map with all the Treatments objects */ getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. * - * @function getTreatmentsWithConfigByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects */ getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig; /** * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. * - * @function getTreatmentsByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the Treatments objects + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the Treatments objects */ getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. * - * @function getTreatmentsWithConfigByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects */ getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface. * - * @function track - * @param {string} trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} Whether the event was added to the queue successfully or not. + * @param trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @param eventType - The event type corresponding to this event. + * @param value - The value of this event. 
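For contrast with the server-side API, a sketch of the same evaluation calls on the client-side `IClient`, where the key is bound at creation time and omitted from each call; flag and event names are illustrative:

```
declare const boundClient: SplitIO.IClient;

const treatment = boundClient.getTreatment('new_checkout', { plan: 'premium' });
const withConfig = boundClient.getTreatmentWithConfig('new_checkout');
// withConfig.treatment is the treatment string; withConfig.config is stringified JSON or null.

boundClient.track('user', 'page_view');
```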
+ * @param properties - The properties of this event. Values can be string, number, boolean or null. + * @returns Whether the event was added to the queue successfully or not. */ track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; } /** * This represents the interface for the Client instance with asynchronous storage for client-side SDK, where each client has associated a key. - * @interface IAsyncClient - * @extends IClientWithAttributes */ interface IAsyncClient extends IClientWithAttributes { /** * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. * - * @function getTreatment - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatment promise that resolves to the treatment string. */ getTreatment(featureFlagName: string, attributes?: Attributes): AsyncTreatment; /** * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. * - * @function getTreatmentWithConfig - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. */ getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig; /** * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. * - * @function getTreatments - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. */ getTreatments(featureFlagNames: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. * - * @function getTreatmentsWithConfig - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. */ getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. * - * @function getTreatmentsByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. */ getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. * - * @function getTreatmentsWithConfigByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. */ getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. * - * @function getTreatmentsByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. */ getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatments; /** * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. * - * @function getTreatmentsWithConfigByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. */ getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; /** * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). * - * @function track - * @param {string} trafficType - The traffic type of the entity related to this event. - * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. + * @param trafficType - The traffic type of the entity related to this event. + * @param eventType - The event type corresponding to this event. + * @param value - The value of this event. + * @param properties - The properties of this event. Values can be string, number, boolean or null. + * @returns A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. */ track(trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; } /** * Representation of a manager instance with synchronous storage of the SDK. - * @interface IManager - * @extends IStatusInterface */ interface IManager extends IStatusInterface { /** * Get the array of feature flag names. - * @function names - * @returns {SplitNames} The list of feature flag names. + * + * @returns The list of feature flag names. */ names(): SplitNames; /** * Get the array of feature flags data in SplitView format. - * @function splits - * @returns {SplitViews} The list of SplitIO.SplitView. + * + * @returns The list of SplitIO.SplitView. */ splits(): SplitViews; /** * Get the data of a feature flag in SplitView format. - * @function split - * @param {string} featureFlagName The name of the feature flag we want to get info of. - * @returns {SplitView | null} The SplitIO.SplitView of the given feature flag name or null if the feature flag is not found. + * + * @param featureFlagName - The name of the feature flag we want to get info of. + * @returns The SplitIO.SplitView of the given feature flag name or null if the feature flag is not found. */ split(featureFlagName: string): SplitView | null; } /** * Representation of a manager instance with asynchronous storage of the SDK. - * @interface IAsyncManager - * @extends IStatusInterface */ interface IAsyncManager extends IStatusInterface { /** * Get the array of feature flag names. - * @function names - * @returns {SplitNamesAsync} A promise that resolves to the list of feature flag names. + * + * @returns A promise that resolves to the list of feature flag names. */ names(): SplitNamesAsync; /** * Get the array of feature flags data in SplitView format. - * @function splits - * @returns {SplitViewsAsync} A promise that resolves to the SplitIO.SplitView list. + * + * @returns A promise that resolves to the SplitIO.SplitView list. 
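A short sketch of the synchronous manager API described above, using an illustrative feature flag name:

```
declare const manager: SplitIO.IManager;

const names = manager.names();               // array of feature flag names
const views = manager.splits();              // SplitView data for every flag
const view = manager.split('new_checkout');  // SplitView for one flag, or null if not found
```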
*/ splits(): SplitViewsAsync; /** * Get the data of a feature flag in SplitView format. - * @function split - * @param {string} featureFlagName The name of the feature flag we want to get info of. - * @returns {SplitViewAsync} A promise that resolves to the SplitIO.SplitView value. + * + * @param featureFlagName - The name of the feature flag we want to get info of. + * @returns A promise that resolves to the SplitIO.SplitView value. */ split(featureFlagName: string): SplitViewAsync; } From 1351df622355f4b4917ab66133d8e0814689e3fe Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 29 Oct 2024 13:28:32 -0300 Subject: [PATCH 136/146] rc --- package-lock.json | 4 ++-- package.json | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 589820a5..5c66a6b6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.4", + "version": "2.0.0-rc.5", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.4", + "version": "2.0.0-rc.5", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index f774a4e4..80479f41 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.4", + "version": "2.0.0-rc.5", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", @@ -17,7 +17,7 @@ ], "scripts": { "check": "npm run check:lint && npm run check:types", - "check:lint": "eslint src types --ext .js,.ts,.d.ts", + "check:lint": "eslint src types --ext .js,.ts", "check:types": "tsc --noEmit", "build": "npm run build:cjs && npm run build:esm", "build:esm": "rimraf esm && tsc -m es2015 --outDir esm", From b352f71c8fb21f8041ebb83145d2dfda0ce5b4cd Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 30 Oct 2024 23:36:29 -0300 Subject: [PATCH 137/146] Rename some TS interfaces to minimize breaking changes in JS SDK --- src/sdkClient/client.ts | 4 +-- src/sdkClient/clientAttributesDecoration.ts | 2 +- src/sdkClient/clientCS.ts | 4 +-- src/sdkClient/clientInputValidation.ts | 2 +- src/sdkClient/sdkClient.ts | 2 +- src/sdkClient/sdkClientMethod.ts | 2 +- src/sdkClient/sdkClientMethodCS.ts | 8 +++--- src/sdkFactory/__tests__/index.spec.ts | 6 ++-- src/sdkFactory/index.ts | 2 +- src/sdkFactory/types.ts | 8 +++--- .../inMemory/UniqueKeysCacheInMemoryCS.ts | 5 ---- types/splitio.d.ts | 28 +++++++++---------- 12 files changed, 34 insertions(+), 39 deletions(-) diff --git a/src/sdkClient/client.ts b/src/sdkClient/client.ts index 0f5eb8a6..01c5053d 100644 --- a/src/sdkClient/client.ts +++ b/src/sdkClient/client.ts @@ -25,7 +25,7 @@ function treatmentsNotReady(featureFlagNames: string[]) { /** * Creator of base client with getTreatments and track methods. 
*/ -export function clientFactory(params: ISdkFactoryContext): SplitIO.INodeClient | SplitIO.INodeAsyncClient { +export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | SplitIO.IAsyncClient { const { sdkReadinessManager: { readinessManager }, storage, settings, impressionsTracker, eventTracker, telemetryTracker } = params; const { log, mode } = settings; const isAsync = isConsumerMode(mode); @@ -199,5 +199,5 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.INodeClient | getTreatmentsByFlagSet, getTreatmentsWithConfigByFlagSet, track, - } as SplitIO.INodeClient | SplitIO.INodeAsyncClient; + } as SplitIO.IClient | SplitIO.IAsyncClient; } diff --git a/src/sdkClient/clientAttributesDecoration.ts b/src/sdkClient/clientAttributesDecoration.ts index 621b017d..cf31b5d3 100644 --- a/src/sdkClient/clientAttributesDecoration.ts +++ b/src/sdkClient/clientAttributesDecoration.ts @@ -7,7 +7,7 @@ import { objectAssign } from '../utils/lang/objectAssign'; /** * Add in memory attributes storage methods and combine them with any attribute received from the getTreatment/s call */ -export function clientAttributesDecoration(log: ILogger, client: TClient) { +export function clientAttributesDecoration(log: ILogger, client: TClient) { const attributeStorage = new AttributesCacheInMemory(); diff --git a/src/sdkClient/clientCS.ts b/src/sdkClient/clientCS.ts index a16ba90f..e815f011 100644 --- a/src/sdkClient/clientCS.ts +++ b/src/sdkClient/clientCS.ts @@ -10,7 +10,7 @@ import { clientAttributesDecoration } from './clientAttributesDecoration'; * @param client - sync client instance * @param key - validated split key */ -export function clientCSDecorator(log: ILogger, client: SplitIO.INodeClient, key: SplitIO.SplitKey): SplitIO.IClient { +export function clientCSDecorator(log: ILogger, client: SplitIO.IClient, key: SplitIO.SplitKey): SplitIO.IBrowserClient { let clientCS = clientAttributesDecoration(log, client); @@ -30,5 +30,5 @@ export function clientCSDecorator(log: ILogger, client: SplitIO.INodeClient, key // Not part of the public API. These properties are used to support other modules (e.g., Split Suite) isClientSide: true, key - }) as SplitIO.IClient; + }) as SplitIO.IBrowserClient; } diff --git a/src/sdkClient/clientInputValidation.ts b/src/sdkClient/clientInputValidation.ts index 8b93cc96..e7fb9db1 100644 --- a/src/sdkClient/clientInputValidation.ts +++ b/src/sdkClient/clientInputValidation.ts @@ -24,7 +24,7 @@ import { validateFlagSets } from '../utils/settingsValidation/splitFilters'; * Decorator that validates the input before actually executing the client methods. * We should "guard" the client here, while not polluting the "real" implementation of those methods. 
*/ -export function clientInputValidationDecorator(settings: ISettings, client: TClient, readinessManager: IReadinessManager): TClient { +export function clientInputValidationDecorator(settings: ISettings, client: TClient, readinessManager: IReadinessManager): TClient { const { log, mode } = settings; const isAsync = isConsumerMode(mode); diff --git a/src/sdkClient/sdkClient.ts b/src/sdkClient/sdkClient.ts index 3d7aef28..fbc4aeb9 100644 --- a/src/sdkClient/sdkClient.ts +++ b/src/sdkClient/sdkClient.ts @@ -10,7 +10,7 @@ const COOLDOWN_TIME_IN_MILLIS = 1000; /** * Creates an Sdk client, i.e., a base client with status and destroy interface */ -export function sdkClientFactory(params: ISdkFactoryContext, isSharedClient?: boolean): SplitIO.INodeClient | SplitIO.INodeAsyncClient { +export function sdkClientFactory(params: ISdkFactoryContext, isSharedClient?: boolean): SplitIO.IClient | SplitIO.IAsyncClient { const { sdkReadinessManager, syncManager, storage, signalListener, settings, telemetryTracker, uniqueKeysTracker } = params; let lastActionTime = 0; diff --git a/src/sdkClient/sdkClientMethod.ts b/src/sdkClient/sdkClientMethod.ts index c24307e2..e2a4112e 100644 --- a/src/sdkClient/sdkClientMethod.ts +++ b/src/sdkClient/sdkClientMethod.ts @@ -6,7 +6,7 @@ import { ISdkFactoryContext } from '../sdkFactory/types'; /** * Factory of client method for server-side SDKs */ -export function sdkClientMethodFactory(params: ISdkFactoryContext): () => SplitIO.INodeClient | SplitIO.INodeAsyncClient { +export function sdkClientMethodFactory(params: ISdkFactoryContext): () => SplitIO.IClient | SplitIO.IAsyncClient { const log = params.settings.log; const clientInstance = sdkClientFactory(params); diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index a5d66656..ebc755a1 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -14,12 +14,12 @@ import { buildInstanceId } from './identity'; * Factory of client method for the client-side API variant where TT is ignored. * Therefore, clients don't have a bound TT for the track method. 
*/ -export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.IClient { +export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.IBrowserClient { const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log } } = params; const mainClientInstance = clientCSDecorator( log, - sdkClientFactory(params) as SplitIO.INodeClient, + sdkClientFactory(params) as SplitIO.IClient, key ); @@ -71,7 +71,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sdkReadinessManager: sharedSdkReadiness, storage: sharedStorage || storage, syncManager: sharedSyncManager, - }), true) as SplitIO.INodeClient, + }), true) as SplitIO.IClient, validKey ); @@ -80,6 +80,6 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl log.debug(RETRIEVE_CLIENT_EXISTING); } - return clients[instanceId] as SplitIO.IClient; + return clients[instanceId] as SplitIO.IBrowserClient; }; } diff --git a/src/sdkFactory/__tests__/index.spec.ts b/src/sdkFactory/__tests__/index.spec.ts index b4ef3ec1..e46296be 100644 --- a/src/sdkFactory/__tests__/index.spec.ts +++ b/src/sdkFactory/__tests__/index.spec.ts @@ -26,7 +26,7 @@ jest.mock('../../trackers/telemetryTracker', () => { }; }); -// IAsyncSDK, minimal params +// IBrowserAsyncSDK, minimal params const paramsForAsyncSDK = { settings: fullSettings, storageFactory: jest.fn(() => mockStorage), @@ -40,7 +40,7 @@ const paramsForAsyncSDK = { const SignalListenerInstanceMock = { start: jest.fn() }; -// ISDK, full params +// IBrowserSDK, full params const fullParamsForSyncSDK = { ...paramsForAsyncSDK, syncManagerFactory: jest.fn(), @@ -59,7 +59,7 @@ const fullParamsForSyncSDK = { /** End Mocks */ -function assertSdkApi(sdk: SplitIO.INodeAsyncSDK | SplitIO.INodeSDK | SplitIO.IAsyncSDK | SplitIO.ISDK, params: any) { +function assertSdkApi(sdk: SplitIO.IAsyncSDK | SplitIO.ISDK | SplitIO.IBrowserAsyncSDK | SplitIO.IBrowserSDK, params: any) { expect(sdk.Logger).toBe(loggerApiMock); expect(sdk.settings).toBe(params.settings); expect(sdk.client).toBe(params.sdkClientMethodFactory.mock.results[0].value); diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 4363f0e2..572b6bec 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -18,7 +18,7 @@ import { NONE, OPTIMIZED } from '../utils/constants'; /** * Modular SDK factory */ -export function sdkFactory(params: ISdkFactoryParams): SplitIO.INodeSDK | SplitIO.INodeAsyncSDK | SplitIO.ISDK | SplitIO.IAsyncSDK { +export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IAsyncSDK | SplitIO.IBrowserSDK | SplitIO.IBrowserAsyncSDK { const { settings, platform, storageFactory, splitApiFactory, extraProps, syncManagerFactory, SignalListener, impressionsObserverFactory, diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts index 2fa1cee0..443b1456 100644 --- a/src/sdkFactory/types.ts +++ b/src/sdkFactory/types.ts @@ -79,7 +79,7 @@ export interface ISdkFactoryParams { platform: IPlatform, // Storage factory. 
The result storage type implies the type of the SDK: - // sync SDK (`ISDK` and `INodeSDK`) with `IStorageSync`, and async SDK (`IAsyncSDK` and `INodeAsyncSDK`) with `IStorageAsync` + // sync SDK (`IBrowserSDK` and `ISDK`) with `IStorageSync`, and async SDK (`IBrowserAsyncSDK` and `IAsyncSDK`) with `IStorageAsync` storageFactory: (params: IStorageFactoryParams) => IStorageSync | IStorageAsync, // Factory of Split Api (HTTP Client Service). @@ -94,9 +94,9 @@ export interface ISdkFactoryParams { // Sdk manager factory sdkManagerFactory: typeof sdkManagerFactory, - // Sdk client method factory (ISDK::client method). - // It Allows to distinguish SDK clients with the client-side API (`ISDK` and `IAsyncSDK`) or server-side API (`INodeSDK` and `INodeAsyncSDK`). - sdkClientMethodFactory: (params: ISdkFactoryContext) => ({ (): SplitIO.IClient; (key: SplitIO.SplitKey): SplitIO.IClient; } | (() => SplitIO.INodeClient) | (() => SplitIO.INodeAsyncClient)) + // Sdk client method factory. + // It Allows to distinguish SDK clients with the client-side API (`IBrowserSDK` and `IBrowserAsyncSDK`) or server-side API (`ISDK` and `IAsyncSDK`). + sdkClientMethodFactory: (params: ISdkFactoryContext) => ({ (): SplitIO.IBrowserClient; (key: SplitIO.SplitKey): SplitIO.IBrowserClient; } | (() => SplitIO.IClient) | (() => SplitIO.IAsyncClient)) // Impression observer factory. impressionsObserverFactory: () => IImpressionObserver diff --git a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts index d5fb8375..87133e3d 100644 --- a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts +++ b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts @@ -10,11 +10,6 @@ export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { private uniqueTrackerSize = 0; private uniqueKeysTracker: { [userKey: string]: Set } = {}; - /** - * - * @param impressionsQueueSize - number of queued impressions to call onFullQueueCb. - * Default value is 0, that means no maximum value, in case we want to avoid this being triggered. - */ constructor(uniqueKeysQueueSize = DEFAULT_CACHE_SIZE) { this.maxStorage = uniqueKeysQueueSize; } diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 0a5233db..223b39b9 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1422,19 +1422,19 @@ declare namespace SplitIO { * This represents the interface for the SDK instance with synchronous storage and client-side API, * i.e., where client instances have a bound user key. */ - interface ISDK extends IBasicSDK { + interface IBrowserSDK extends IBasicSDK { /** * Returns the default client instance of the SDK, associated with the key provided on settings. * * @returns The client instance. */ - client(): IClient; + client(): IBrowserClient; /** * Returns a shared client of the SDK, associated with the given key. * @param key - The key for the new client instance. * @returns The client instance. */ - client(key: SplitKey): IClient; + client(key: SplitKey): IBrowserClient; /** * Returns a manager instance of the SDK to explore available information. * @@ -1450,20 +1450,20 @@ declare namespace SplitIO { * This represents the interface for the SDK instance with asynchronous storage and client-side API, * i.e., where client instances have a bound user key. */ - interface IAsyncSDK extends IBasicSDK { + interface IBrowserAsyncSDK extends IBasicSDK { /** * Returns the default client instance of the SDK, associated with the key provided on settings. * * @returns The asynchronous client instance. 
*/ - client(): IAsyncClient; + client(): IBrowserAsyncClient; /** * Returns a shared client of the SDK, associated with the given key. * * @param key - The key for the new client instance. * @returns The asynchronous client instance. */ - client(key: SplitKey): IAsyncClient; + client(key: SplitKey): IBrowserAsyncClient; /** * Returns a manager instance of the SDK to explore available information. * @@ -1478,13 +1478,13 @@ declare namespace SplitIO { /** * This represents the interface for the SDK instance for server-side with synchronous storage. */ - interface INodeSDK extends IBasicSDK { + interface ISDK extends IBasicSDK { /** * Returns the default client instance of the SDK. * * @returns The client instance. */ - client(): INodeClient; + client(): IClient; /** * Returns a manager instance of the SDK to explore available information. * @@ -1495,13 +1495,13 @@ declare namespace SplitIO { /** * This represents the interface for the SDK instance for server-side with asynchronous storage. */ - interface INodeAsyncSDK extends IBasicSDK { + interface IAsyncSDK extends IBasicSDK { /** * Returns the default client instance of the SDK. * * @returns The asynchronous client instance. */ - client(): INodeAsyncClient; + client(): IAsyncClient; /** * Returns a manager instance of the SDK to explore available information. * @@ -1513,7 +1513,7 @@ declare namespace SplitIO { * This represents the interface for the Client instance on server-side, where the user key is not bound to the instance and must be provided on each method call. * This interface is available in NodeJS, or when importing the 'server' sub-package of JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). */ - interface INodeClient extends IBasicClient { + interface IClient extends IBasicClient { /** * Returns a Treatment value, which is the treatment string for the given feature. * @@ -1604,7 +1604,7 @@ declare namespace SplitIO { * User key is not bound to the instance and must be provided on each method call, which returns a promise. * This interface is available in NodeJS, or when importing the 'server' sub-package in JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). */ - interface INodeAsyncClient extends IBasicClient { + interface IAsyncClient extends IBasicClient { /** * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. * @@ -1735,7 +1735,7 @@ declare namespace SplitIO { /** * This represents the interface for the Client instance on client-side, where the user key is bound to the instance on creation and does not need to be provided on each method call. */ - interface IClient extends IClientWithAttributes { + interface IBrowserClient extends IClientWithAttributes { /** * Returns a Treatment value, which is the treatment string for the given feature. * @@ -1814,7 +1814,7 @@ declare namespace SplitIO { /** * This represents the interface for the Client instance with asynchronous storage for client-side SDK, where each client has associated a key. */ - interface IAsyncClient extends IClientWithAttributes { + interface IBrowserAsyncClient extends IClientWithAttributes { /** * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. 
* From d8a3445d3479dfd2bc4c3824b11d770152f7e52a Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 30 Oct 2024 23:59:25 -0300 Subject: [PATCH 138/146] Polishing --- .../__tests__/SplitsUpdateWorker.spec.ts | 6 +-- types/splitio.d.ts | 48 +++++++++---------- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts index 99c61bae..d5fd3acd 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts @@ -219,9 +219,9 @@ describe('SplitsUpdateWorker', () => { const splitUpdateWorker = SplitsUpdateWorker(loggerMock, cache, splitsSyncTask, telemetryTracker); const payload = notification.decoded; const changeNumber = payload.changeNumber; - splitUpdateWorker.put( { changeNumber, pcn }, payload); // queued + splitUpdateWorker.put({ changeNumber, pcn }, payload); // queued expect(splitsSyncTask.execute).toBeCalledTimes(1); - expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, {changeNumber, payload}]); + expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, { changeNumber, payload }]); }); }); @@ -265,7 +265,7 @@ describe('SplitsUpdateWorker', () => { splitUpdateWorker = SplitsUpdateWorker(loggerMock, cache, splitsSyncTask, telemetryTracker); splitUpdateWorker.put({ changeNumber, pcn }, notification.decoded); expect(splitsSyncTask.execute).toBeCalledTimes(1); - expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, {payload: notification.decoded, changeNumber }]); + expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, { payload: notification.decoded, changeNumber }]); }); }); diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 223b39b9..b3ee06f7 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -83,7 +83,7 @@ declare namespace SplitIO { interface ISettings { readonly core: { authorizationKey: string; - key: SplitIO.SplitKey; + key: SplitKey; labelsEnabled: boolean; IPAddressesEnabled: boolean; }; @@ -109,7 +109,7 @@ declare namespace SplitIO { retriesOnFailureBeforeReady: number; eventsFirstPushWindow: number; }; - readonly storage: SplitIO.StorageSyncFactory | SplitIO.StorageAsyncFactory | SplitIO.StorageOptions; + readonly storage: StorageSyncFactory | StorageAsyncFactory | StorageOptions; readonly urls: { events: string; sdk: string; @@ -117,17 +117,17 @@ declare namespace SplitIO { streaming: string; telemetry: string; }; - readonly integrations?: SplitIO.IntegrationFactory[]; - readonly debug: boolean | LogLevel | SplitIO.ILogger; + readonly integrations?: IntegrationFactory[]; + readonly debug: boolean | LogLevel | ILogger; readonly version: string; /** * Mocked features map if using in client-side, or mocked features file path string if using in server-side (NodeJS). 
*/ - features: SplitIO.MockedFeaturesMap | SplitIO.MockedFeaturesFilePath; + features: MockedFeaturesMap | MockedFeaturesFilePath; readonly streamingEnabled: boolean; readonly sync: { - splitFilters: SplitIO.SplitFilter[]; - impressionsMode: SplitIO.ImpressionsMode; + splitFilters: SplitFilter[]; + impressionsMode: ImpressionsMode; enabled: boolean; flagSpecVersion: string; requestOptions?: { @@ -138,11 +138,11 @@ declare namespace SplitIO { ip: string | false; hostname: string | false; }; - readonly impressionListener?: SplitIO.IImpressionListener; + readonly impressionListener?: IImpressionListener; /** * User consent status if using in client-side. Undefined if using in server-side (NodeJS). */ - readonly userConsent?: SplitIO.ConsentStatus; + readonly userConsent?: ConsentStatus; } /** * Log levels. @@ -191,12 +191,12 @@ declare namespace SplitIO { * * @returns The user consent status. */ - getStatus(): SplitIO.ConsentStatus; + getStatus(): ConsentStatus; /** * Consent status constants. Use this to compare with the getStatus function result. */ Status: { - [status in SplitIO.ConsentStatus]: SplitIO.ConsentStatus; + [status in ConsentStatus]: ConsentStatus; }; } /** @@ -382,7 +382,7 @@ declare namespace SplitIO { */ type ImpressionData = { impression: ImpressionDTO; - attributes?: SplitIO.Attributes; + attributes?: Attributes; ip: string | false; hostname: string | false; sdkLanguageVersion: string; @@ -526,7 +526,7 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#listener} */ interface IImpressionListener { - logImpression(data: SplitIO.ImpressionData): void; + logImpression(data: ImpressionData): void; } /** * SDK integration instance. @@ -573,20 +573,20 @@ declare namespace SplitIO { /** * The type of Split data. */ - type: 'IMPRESSION', + type: 'IMPRESSION'; /** * The impression data. */ - payload: SplitIO.ImpressionData + payload: ImpressionData; } | { /** * The type of Split data. */ - type: 'EVENT', + type: 'EVENT'; /** * The event data. */ - payload: SplitIO.EventData + payload: EventData; }; /** * Available URL settings for the SDKs. @@ -665,7 +665,7 @@ declare namespace SplitIO { * * @defaultValue `undefined` */ - impressionListener?: SplitIO.IImpressionListener; + impressionListener?: IImpressionListener; /** * SDK synchronization settings. */ @@ -681,7 +681,7 @@ declare namespace SplitIO { * ] * ``` */ - splitFilters?: SplitIO.SplitFilter[]; + splitFilters?: SplitFilter[]; /** * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. @@ -691,7 +691,7 @@ declare namespace SplitIO { * * @defaultValue `'OPTIMIZED'` */ - impressionsMode?: SplitIO.ImpressionsMode; + impressionsMode?: ImpressionsMode; /** * Custom options object for HTTP(S) requests. * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. @@ -782,7 +782,7 @@ declare namespace SplitIO { * * @defaultValue `false` */ - debug?: boolean | LogLevel | SplitIO.ILogger; + debug?: boolean | LogLevel | ILogger; /** * Defines an optional list of factory functions used to instantiate SDK integrations. * @@ -1241,7 +1241,7 @@ declare namespace SplitIO { * * @defaultValue `'$HOME/.split'` */ - features?: SplitIO.MockedFeaturesFilePath; + features?: MockedFeaturesFilePath; /** * Defines which kind of storage we can instantiate on NodeJS for 'standalone' mode. 
* The only possible storage type is 'MEMORY', which is the default. @@ -1597,7 +1597,7 @@ declare namespace SplitIO { * @param properties - The properties of this event. Values can be string, number, boolean or null. * @returns Whether the event was added to the queue successfully or not. */ - track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; + track(key: SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; } /** * This represents the interface for the Client instance on server-side with asynchronous storage, like REDIS. @@ -1687,7 +1687,7 @@ declare namespace SplitIO { * @param properties - The properties of this event. Values can be string, number, boolean or null. * @returns A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. */ - track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; + track(key: SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; } interface IClientWithAttributes extends IBasicClient { /** From d052757921850dfd44e83d853098065eeb224702 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 31 Oct 2024 00:30:58 -0300 Subject: [PATCH 139/146] Move internal types outside namespace --- types/splitio.d.ts | 657 +++++++++++++++++++++++---------------------- 1 file changed, 329 insertions(+), 328 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index b3ee06f7..22d5a6e8 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -7,6 +7,335 @@ import { RequestOptions } from 'http'; export as namespace SplitIO; export = SplitIO; +/** + * Common settings properties. + */ +interface ISharedSettings { + /** + * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, + * which will check for the logImpression method. + * + * @defaultValue `undefined` + */ + impressionListener?: SplitIO.IImpressionListener; + /** + * SDK synchronization settings. + */ + sync?: { + /** + * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. + * This configuration is only meaningful when the SDK is working in "standalone" mode. + * + * Example: + * ``` + * splitFilter: [ + * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' + * ] + * ``` + */ + splitFilters?: SplitIO.SplitFilter[]; + /** + * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. + * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. + * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). + * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. + * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. + * + * @defaultValue `'OPTIMIZED'` + */ + impressionsMode?: SplitIO.ImpressionsMode; + /** + * Custom options object for HTTP(S) requests. 
+ * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. + * This configuration has no effect in "consumer" mode, as no HTTP(S) requests are made by the SDK. + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different cases since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, + * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and they will be ignored. + * + * @defaultValue `undefined` + * + * @param context - The context for the request, which contains the `headers` property object representing the current headers in the request. + * @returns An object representing a set of headers to be merged with the current headers. + * + * @example + * ``` + * const getHeaderOverrides = (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * }; + * ``` + */ + getHeaderOverrides?: (context: { headers: Record }) => Record; + }; + }; + /** + * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone and partial consumer modes. + * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. + */ + urls?: SplitIO.UrlSettings; +} +/** + * Common settings properties for SDKs with synchronous API (standalone and localhost modes). + */ +interface ISyncSharedSettings extends ISharedSettings { + /** + * The SDK mode. When using the default in-memory storage or `InLocalStorage` as storage, the only possible value is "standalone", which is the default. + * For "localhost" mode, use "localhost" as authorizationKey. + * + * @defaultValue `'standalone'` + */ + mode?: 'standalone'; + /** + * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, + * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. + * + * @defaultValue `true` + */ + streamingEnabled?: boolean; + /** + * SDK synchronization settings. + */ + sync?: ISharedSettings['sync'] & { + /** + * Controls the SDK continuous synchronization flags. + * + * When `true` a running SDK will process rollout plan updates performed on the UI (default). + * When false it'll just fetch all data upon init. + * + * @defaultValue `true` + */ + enabled?: boolean; + }; +} +/** + * Common settings properties for SDKs with pluggable configuration. + */ +interface IPluggableSettings { + /** + * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. + * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging}. 
+ * + * Examples: + * ``` + * config.debug = true + * config.debug = 'WARN' + * config.debug = ErrorLogger() + * ``` + * + * @defaultValue `false` + */ + debug?: boolean | SplitIO.LogLevel | SplitIO.ILogger; + /** + * Defines an optional list of factory functions used to instantiate SDK integrations. + * + * NOTE: at the moment there are not integrations to plug in. + */ + integrations?: SplitIO.IntegrationFactory[]; +} +/** + * Common settings properties for SDKs without pluggable configuration. + */ +interface INonPluggableSettings { + /** + * Boolean value to indicate whether the logger should be enabled or disabled, or a log level string. + * + * Examples: + * ``` + * config.debug = true + * config.debug = 'WARN' + * ``` + * + * @defaultValue `false` + */ + debug?: boolean | SplitIO.LogLevel; +} +/** + * Common settings properties for server-side SDKs. + */ +interface IServerSideSharedSettings { + /** + * SDK Core settings for NodeJS. + */ + core: { + /** + * Your SDK key. + * + * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} + */ + authorizationKey: string; + /** + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * + * @defaultValue `true` + */ + labelsEnabled?: boolean; + /** + * Disable machine IP and Name from being sent to Split backend. + * + * @defaultValue `true` + */ + IPAddressesEnabled?: boolean; + }; +} +/** + * Common settings properties for client-side SDKs. + */ +interface IClientSideSharedSettings { + /** + * SDK Core settings for client-side. + */ + core: { + /** + * Your SDK key. + * + * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} + */ + authorizationKey: string; + /** + * Customer identifier. Whatever this means to you. + * + * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + */ + key: SplitIO.SplitKey; + /** + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * + * @defaultValue `true` + */ + labelsEnabled?: boolean; + }; + /** + * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. + * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. + * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. + * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends + * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. + * + * @defaultValue `'GRANTED'` + */ + userConsent?: SplitIO.ConsentStatus; +} +/** + * Common settings properties for client-side standalone SDKs. + */ +interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISyncSharedSettings { + /** + * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + */ + features?: SplitIO.MockedFeaturesMap; + /** + * SDK Startup settings. + */ + startup?: { + /** + * Maximum amount of time used before notify a timeout. + * + * @defaultValue `10` + */ + readyTimeout?: number; + /** + * Time to wait for a request before the SDK is ready. 
If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. + * + * @defaultValue `5` + */ + requestTimeoutBeforeReady?: number; + /** + * How many quick retries we will do while starting up the SDK. + * + * @defaultValue `1` + */ + retriesOnFailureBeforeReady?: number; + /** + * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, + * to better control on browsers or mobile. This number defines that window before the first events push. + * + * @defaultValue `10` + */ + eventsFirstPushWindow?: number; + }; + /** + * SDK scheduler settings. + */ + scheduler?: { + /** + * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. + * + * @defaultValue `60` + */ + featuresRefreshRate?: number; + /** + * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. + * + * @defaultValue `60` + */ + impressionsRefreshRate?: number; + /** + * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * @defaultValue `30000` + */ + impressionsQueueSize?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * + * @defaultValue `120` + * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. + */ + metricsRefreshRate?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * + * @defaultValue `3600` + */ + telemetryRefreshRate?: number; + /** + * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. + * + * @defaultValue `60` + */ + segmentsRefreshRate?: number; + /** + * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. + * + * @defaultValue `60` + */ + eventsPushRate?: number; + /** + * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * @defaultValue `500` + */ + eventsQueueSize?: number; + /** + * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. + * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + * + * @defaultValue `15` + */ + offlineRefreshRate?: number; + /** + * When using streaming mode, seconds to wait before re attempting to connect for push notifications. + * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... + * + * @defaultValue `1` + */ + pushRetryBackoffBase?: number; + }; +} + /****** Exposed namespace ******/ /** * Shared types and interfaces for `@splitsoftware` packages, to support integrating JavaScript SDKs with TypeScript. @@ -655,334 +984,6 @@ declare namespace SplitIO { interface ILogger { setLogLevel(logLevel: LogLevel): void; } - /** - * Common settings properties. - */ - interface ISharedSettings { - /** - * The impression listener, which is optional. 
Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, - * which will check for the logImpression method. - * - * @defaultValue `undefined` - */ - impressionListener?: IImpressionListener; - /** - * SDK synchronization settings. - */ - sync?: { - /** - * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. - * This configuration is only meaningful when the SDK is working in "standalone" mode. - * - * Example: - * ``` - * splitFilter: [ - * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' - * ] - * ``` - */ - splitFilters?: SplitFilter[]; - /** - * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. - * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. - * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). - * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. - * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. - * - * @defaultValue `'OPTIMIZED'` - */ - impressionsMode?: ImpressionsMode; - /** - * Custom options object for HTTP(S) requests. - * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. - * This configuration has no effect in "consumer" mode, as no HTTP(S) requests are made by the SDK. - */ - requestOptions?: { - /** - * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. - * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. - * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different cases since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` - * - * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, - * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and they will be ignored. - * - * @defaultValue `undefined` - * - * @param context - The context for the request, which contains the `headers` property object representing the current headers in the request. - * @returns An object representing a set of headers to be merged with the current headers. - * - * @example - * ``` - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; - * ``` - */ - getHeaderOverrides?: (context: { headers: Record }) => Record; - }; - }; - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone and partial consumer modes. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. 
- */ - urls?: UrlSettings; - } - /** - * Common settings properties for SDKs with synchronous API (standalone and localhost modes). - */ - interface ISyncSharedSettings extends ISharedSettings { - /** - * The SDK mode. When using the default in-memory storage or `InLocalStorage` as storage, the only possible value is "standalone", which is the default. - * For "localhost" mode, use "localhost" as authorizationKey. - * - * @defaultValue `'standalone'` - */ - mode?: 'standalone'; - /** - * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, - * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. - * - * @defaultValue `true` - */ - streamingEnabled?: boolean; - /** - * SDK synchronization settings. - */ - sync?: ISharedSettings['sync'] & { - /** - * Controls the SDK continuous synchronization flags. - * - * When `true` a running SDK will process rollout plan updates performed on the UI (default). - * When false it'll just fetch all data upon init. - * - * @defaultValue `true` - */ - enabled?: boolean; - }; - } - /** - * Common settings properties for SDKs with pluggable configuration. - */ - interface IPluggableSettings { - /** - * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. - * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging}. - * - * Examples: - * ``` - * config.debug = true - * config.debug = 'WARN' - * config.debug = ErrorLogger() - * ``` - * - * @defaultValue `false` - */ - debug?: boolean | LogLevel | ILogger; - /** - * Defines an optional list of factory functions used to instantiate SDK integrations. - * - * NOTE: at the moment there are not integrations to plug in. - */ - integrations?: IntegrationFactory[]; - } - /** - * Common settings properties for SDKs without pluggable configuration. - */ - interface INonPluggableSettings { - /** - * Boolean value to indicate whether the logger should be enabled or disabled, or a log level string. - * - * Examples: - * ``` - * config.debug = true - * config.debug = 'WARN' - * ``` - * - * @defaultValue `false` - */ - debug?: boolean | LogLevel; - } - /** - * Common settings properties for server-side SDKs. - */ - interface IServerSideSharedSettings { - /** - * SDK Core settings for NodeJS. - */ - core: { - /** - * Your SDK key. - * - * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - */ - authorizationKey: string; - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * - * @defaultValue `true` - */ - labelsEnabled?: boolean; - /** - * Disable machine IP and Name from being sent to Split backend. - * - * @defaultValue `true` - */ - IPAddressesEnabled?: boolean; - }; - } - /** - * Common settings properties for client-side SDKs. - */ - interface IClientSideSharedSettings { - /** - * SDK Core settings for client-side. - */ - core: { - /** - * Your SDK key. - * - * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - */ - authorizationKey: string; - /** - * Customer identifier. Whatever this means to you. 
- * - * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - */ - key: SplitKey; - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * - * @defaultValue `true` - */ - labelsEnabled?: boolean; - }; - /** - * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. - * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. - * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. - * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends - * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. - * - * @defaultValue `'GRANTED'` - */ - userConsent?: ConsentStatus; - } - /** - * Common settings properties for client-side standalone SDKs. - */ - interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISyncSharedSettings { - /** - * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} - */ - features?: MockedFeaturesMap; - /** - * SDK Startup settings. - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * - * @defaultValue `10` - */ - readyTimeout?: number; - /** - * Time to wait for a request before the SDK is ready. If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * - * @defaultValue `5` - */ - requestTimeoutBeforeReady?: number; - /** - * How many quick retries we will do while starting up the SDK. - * - * @defaultValue `1` - */ - retriesOnFailureBeforeReady?: number; - /** - * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers or mobile. This number defines that window before the first events push. - * - * @defaultValue `10` - */ - eventsFirstPushWindow?: number; - }; - /** - * SDK scheduler settings. - */ - scheduler?: { - /** - * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. - * - * @defaultValue `60` - */ - featuresRefreshRate?: number; - /** - * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * - * @defaultValue `60` - */ - impressionsRefreshRate?: number; - /** - * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * - * @defaultValue `30000` - */ - impressionsQueueSize?: number; - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * - * @defaultValue `120` - * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. - */ - metricsRefreshRate?: number; - /** - * The SDK sends diagnostic metrics to Split servers. 
This parameters controls this metric flush period in seconds. - * - * @defaultValue `3600` - */ - telemetryRefreshRate?: number; - /** - * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * - * @defaultValue `60` - */ - segmentsRefreshRate?: number; - /** - * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * - * @defaultValue `60` - */ - eventsPushRate?: number; - /** - * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * - * @defaultValue `500` - */ - eventsQueueSize?: number; - /** - * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} - * - * @defaultValue `15` - */ - offlineRefreshRate?: number; - /** - * When using streaming mode, seconds to wait before re attempting to connect for push notifications. - * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * - * @defaultValue `1` - */ - pushRetryBackoffBase?: number; - }; - } /** * Settings interface for Browser SDK instances created with client-side API and synchronous storage (e.g., in-memory or local storage). * From 1e1d82768389ad5689acedf6b27e49415c8c6445 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 31 Oct 2024 00:32:09 -0300 Subject: [PATCH 140/146] Revert specialization of the INodeAsyncSettings interface --- types/splitio.d.ts | 225 +++++++++++++++++++++------------------------ 1 file changed, 107 insertions(+), 118 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 22d5a6e8..1cdb5f2a 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -186,6 +186,113 @@ interface IServerSideSharedSettings { */ IPAddressesEnabled?: boolean; }; + /** + * SDK Startup settings for NodeJS. + */ + startup?: { + /** + * Maximum amount of time used before notify a timeout. + * + * @defaultValue `15` + */ + readyTimeout?: number; + /** + * Time to wait for a request before the SDK is ready. If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. + * + * @defaultValue `15` + */ + requestTimeoutBeforeReady?: number; + /** + * How many quick retries we will do while starting up the SDK. + * + * @defaultValue `1` + */ + retriesOnFailureBeforeReady?: number; + /** + * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, + * to better control on browsers. This number defines that window before the first events push. + * + * @defaultValue `0` + */ + eventsFirstPushWindow?: number; + }; + /** + * SDK scheduler settings. + */ + scheduler?: { + /** + * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. + * + * @defaultValue `60` + */ + featuresRefreshRate?: number; + /** + * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. + * + * @defaultValue `300` + */ + impressionsRefreshRate?: number; + /** + * The maximum number of impression items we want to queue. 
If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * @defaultValue `30000` + */ + impressionsQueueSize?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * + * @defaultValue `120` + * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. + */ + metricsRefreshRate?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * + * @defaultValue `3600` + */ + telemetryRefreshRate?: number; + /** + * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. + * + * @defaultValue `60` + */ + segmentsRefreshRate?: number; + /** + * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. + * + * @defaultValue `60` + */ + eventsPushRate?: number; + /** + * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * @defaultValue `500` + */ + eventsQueueSize?: number; + /** + * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. + * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * + * @defaultValue `15` + */ + offlineRefreshRate?: number; + /** + * When using streaming mode, seconds to wait before re attempting to connect for push notifications. + * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... + * + * @defaultValue `1` + */ + pushRetryBackoffBase?: number; + }; + /** + * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * + * @defaultValue `'$HOME/.split'` + */ + features?: SplitIO.MockedFeaturesFilePath; } /** * Common settings properties for client-side SDKs. @@ -1136,113 +1243,6 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ interface INodeSettings extends IServerSideSharedSettings, ISyncSharedSettings, INonPluggableSettings { - /** - * SDK Startup settings for NodeJS. - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * - * @defaultValue `15` - */ - readyTimeout?: number; - /** - * Time to wait for a request before the SDK is ready. If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * - * @defaultValue `15` - */ - requestTimeoutBeforeReady?: number; - /** - * How many quick retries we will do while starting up the SDK. - * - * @defaultValue `1` - */ - retriesOnFailureBeforeReady?: number; - /** - * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. - * - * @defaultValue `0` - */ - eventsFirstPushWindow?: number; - }; - /** - * SDK scheduler settings. - */ - scheduler?: { - /** - * The SDK polls Split servers for changes to feature flag definitions. 
This parameter controls this polling period in seconds. - * - * @defaultValue `60` - */ - featuresRefreshRate?: number; - /** - * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * - * @defaultValue `300` - */ - impressionsRefreshRate?: number; - /** - * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * - * @defaultValue `30000` - */ - impressionsQueueSize?: number; - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * - * @defaultValue `120` - * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. - */ - metricsRefreshRate?: number; - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * - * @defaultValue `3600` - */ - telemetryRefreshRate?: number; - /** - * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * - * @defaultValue `60` - */ - segmentsRefreshRate?: number; - /** - * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * - * @defaultValue `60` - */ - eventsPushRate?: number; - /** - * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * - * @defaultValue `500` - */ - eventsQueueSize?: number; - /** - * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * - * @defaultValue `15` - */ - offlineRefreshRate?: number; - /** - * When using streaming mode, seconds to wait before re attempting to connect for push notifications. - * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * - * @defaultValue `1` - */ - pushRetryBackoffBase?: number; - }; - /** - * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * - * @defaultValue `'$HOME/.split'` - */ - features?: MockedFeaturesFilePath; /** * Defines which kind of storage we can instantiate on NodeJS for 'standalone' mode. * The only possible storage type is 'MEMORY', which is the default. @@ -1331,17 +1331,6 @@ declare namespace SplitIO { * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#state-sharing-redis-integration} */ mode: 'consumer'; - /** - * SDK Startup settings for NodeJS. - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * - * @defaultValue `15` - */ - readyTimeout?: number; - }; /** * Defines which kind of async storage we can instantiate on NodeJS for 'consumer' mode. * The only possible storage type is 'REDIS'. 
From ecae2b22659f41b120a81abffde597f518c7b053 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 31 Oct 2024 00:56:18 -0300 Subject: [PATCH 141/146] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 5c66a6b6..5f72f59f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.5", + "version": "2.0.0-rc.6", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.5", + "version": "2.0.0-rc.6", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index 80479f41..1ab3048e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.5", + "version": "2.0.0-rc.6", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 549ee4fb3b80d04b65e1a504c2cbf49ffdb6fc27 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 31 Oct 2024 12:37:39 -0300 Subject: [PATCH 142/146] Polishing --- types/splitio.d.ts | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 1cdb5f2a..a87dfc9b 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -84,7 +84,7 @@ interface ISharedSettings { urls?: SplitIO.UrlSettings; } /** - * Common settings properties for SDKs with synchronous API (standalone and localhost modes). + * Common settings properties for SDKs with synchronous API (standalone and localhost modes). */ interface ISyncSharedSettings extends ISharedSettings { /** @@ -117,9 +117,9 @@ interface ISyncSharedSettings extends ISharedSettings { }; } /** - * Common settings properties for SDKs with pluggable configuration. + * Common settings properties for SDKs with pluggable options. */ -interface IPluggableSettings { +interface IPluggableSharedSettings { /** * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. @@ -143,9 +143,9 @@ interface IPluggableSettings { integrations?: SplitIO.IntegrationFactory[]; } /** - * Common settings properties for SDKs without pluggable configuration. + * Common settings properties for SDKs without pluggable options. */ -interface INonPluggableSettings { +interface INonPluggableSharedSettings { /** * Boolean value to indicate whether the logger should be enabled or disabled, or a log level string. * @@ -160,7 +160,7 @@ interface INonPluggableSettings { debug?: boolean | SplitIO.LogLevel; } /** - * Common settings properties for server-side SDKs. + * Common settings properties for SDKs with server-side API. */ interface IServerSideSharedSettings { /** @@ -295,7 +295,7 @@ interface IServerSideSharedSettings { features?: SplitIO.MockedFeaturesFilePath; } /** - * Common settings properties for client-side SDKs. + * Common settings properties for SDKs with client-side API. */ interface IClientSideSharedSettings { /** @@ -333,7 +333,7 @@ interface IClientSideSharedSettings { userConsent?: SplitIO.ConsentStatus; } /** - * Common settings properties for client-side standalone SDKs. 
+ * Common settings properties for SDKs with client-side and synchronous API (standalone and localhost modes). */ interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISyncSharedSettings { /** @@ -1096,7 +1096,7 @@ declare namespace SplitIO { * * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} */ - interface IClientSideSettings extends IClientSideSyncSharedSettings, IPluggableSettings { + interface IClientSideSettings extends IClientSideSyncSharedSettings, IPluggableSharedSettings { /** * Defines the factory function to instantiate the storage. If not provided, the default in-memory storage is used. * @@ -1124,7 +1124,7 @@ declare namespace SplitIO { * * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage} */ - interface IClientSideAsyncSettings extends IClientSideSharedSettings, ISharedSettings, IPluggableSettings { + interface IClientSideAsyncSettings extends IClientSideSharedSettings, ISharedSettings, IPluggableSharedSettings { /** * The SDK mode. When using `PluggableStorage` as storage, the possible values are "consumer" and "consumer_partial". * @@ -1216,7 +1216,7 @@ declare namespace SplitIO { * * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} */ - interface IBrowserSettings extends IClientSideSyncSharedSettings, INonPluggableSettings { + interface IBrowserSettings extends IClientSideSyncSharedSettings, INonPluggableSharedSettings { /** * Defines which kind of storage we can instantiate on the browser. * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. @@ -1242,7 +1242,7 @@ declare namespace SplitIO { * * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ - interface INodeSettings extends IServerSideSharedSettings, ISyncSharedSettings, INonPluggableSettings { + interface INodeSettings extends IServerSideSharedSettings, ISyncSharedSettings, INonPluggableSharedSettings { /** * Defines which kind of storage we can instantiate on NodeJS for 'standalone' mode. * The only possible storage type is 'MEMORY', which is the default. @@ -1324,7 +1324,7 @@ declare namespace SplitIO { * * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ - interface INodeAsyncSettings extends IServerSideSharedSettings, ISharedSettings, INonPluggableSettings { + interface INodeAsyncSettings extends IServerSideSharedSettings, ISharedSettings, INonPluggableSharedSettings { /** * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. * From 28c4b1b043fc858efd2992318ea665b846a13e71 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 1 Nov 2024 17:50:32 -0300 Subject: [PATCH 143/146] Prepare stable version --- CHANGES.txt | 4 ++-- package-lock.json | 4 ++-- package.json | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 63b7cbfc..a0e237e0 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,7 +1,7 @@ -2.0.0 (October XX, 2024) +2.0.0 (November 1, 2024) - Added support for targeting rules based on large segments. - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. - - Added `SplitIO` namespace with the public TypeScript definitions to be reused by the SDKs, rather than having each SDK define its own types. 
+ - Added `SplitIO` namespace with the public TypeScript definitions to be reused by the SDKs. - Updated the handling of timers and async operations inside an `init` factory method to enable lazy initialization of the SDK in standalone mode. This update is intended for the React SDK. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. - BREAKING CHANGES: diff --git a/package-lock.json b/package-lock.json index 5f72f59f..40fecca7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.6", + "version": "2.0.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.6", + "version": "2.0.0", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index 1ab3048e..608c3982 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.6", + "version": "2.0.0", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 62741e8b4eb3df3b5952f5fd30390bd1142f1b19 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 1 Nov 2024 18:31:17 -0300 Subject: [PATCH 144/146] Polishing --- CHANGES.txt | 2 +- src/__tests__/testUtils/fetchMock.ts | 3 +-- src/readiness/readinessManager.ts | 8 ++++---- src/readiness/types.ts | 2 +- temp.ts | 25 ------------------------- 5 files changed, 7 insertions(+), 33 deletions(-) delete mode 100644 temp.ts diff --git a/CHANGES.txt b/CHANGES.txt index a0e237e0..b615364a 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -5,7 +5,7 @@ - Updated the handling of timers and async operations inside an `init` factory method to enable lazy initialization of the SDK in standalone mode. This update is intended for the React SDK. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. - BREAKING CHANGES: - - Updated default flag spec version to 1.2. + - Updated default flag spec version to 1.2, which requires Split Proxy v5.9.0 or higher. - Removed `/mySegments` endpoint from SplitAPI module, as it is replaced by `/memberships` endpoint. - Removed support for MY_SEGMENTS_UPDATE and MY_SEGMENTS_UPDATE_V2 notification types, as they are replaced by MEMBERSHIPS_MS_UPDATE and MEMBERSHIPS_LS_UPDATE notification types. - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. 
diff --git a/src/__tests__/testUtils/fetchMock.ts b/src/__tests__/testUtils/fetchMock.ts index 8b86df27..780aa231 100644 --- a/src/__tests__/testUtils/fetchMock.ts +++ b/src/__tests__/testUtils/fetchMock.ts @@ -1,5 +1,4 @@ -// @TODO upgrade fetch-mock when fetch-mock-jest vulnerabilities are fixed -// https://www.wheresrhys.co.uk/fetch-mock/docs/fetch-mock/Usage/cheatsheet#local-fetch-with-jest +// @TODO upgrade fetch-mock to fix vulnerabilities import fetchMockLib from 'fetch-mock'; const fetchMock = fetchMockLib.sandbox(); diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index 1581014c..e8f92dce 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -8,7 +8,7 @@ function splitsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter const splitsEventEmitter = objectAssign(new EventEmitter(), { splitsArrived: false, splitsCacheLoaded: false, - initialized: false, + hasInit: false, initCallbacks: [] }); @@ -68,7 +68,7 @@ export function readinessManagerFactory( let readyTimeoutId: ReturnType; if (readyTimeout > 0) { - if (splits.initialized) readyTimeoutId = setTimeout(timeout, readyTimeout); + if (splits.hasInit) readyTimeoutId = setTimeout(timeout, readyTimeout); else splits.initCallbacks.push(() => { readyTimeoutId = setTimeout(timeout, readyTimeout); }); } @@ -137,8 +137,8 @@ export function readinessManagerFactory( setDestroyed() { isDestroyed = true; }, init() { - if (splits.initialized) return; - splits.initialized = true; + if (splits.hasInit) return; + splits.hasInit = true; splits.initCallbacks.forEach(cb => cb()); }, diff --git a/src/readiness/types.ts b/src/readiness/types.ts index 03886c24..df3c2603 100644 --- a/src/readiness/types.ts +++ b/src/readiness/types.ts @@ -13,7 +13,7 @@ export interface ISplitsEventEmitter extends SplitIO.IEventEmitter { once(event: ISplitsEvent, listener: (...args: any[]) => void): this; splitsArrived: boolean splitsCacheLoaded: boolean - initialized: boolean, + hasInit: boolean, initCallbacks: (() => void)[] } diff --git a/temp.ts b/temp.ts deleted file mode 100644 index 859fa873..00000000 --- a/temp.ts +++ /dev/null @@ -1,25 +0,0 @@ - /** - * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. - * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. - * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` - * - * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, - * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. - * - * @property getHeaderOverrides - * @default undefined - * - * @param context - The context for the request. - * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. 
- * - * @example - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; - */ - getHeaderOverrides?: (context: { headers: Record }) => Record \ No newline at end of file From 55228089f108537db4b2d902ac73a3ec7e9608c6 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 1 Nov 2024 21:20:52 -0300 Subject: [PATCH 145/146] Re-trigger CI From dbddb86b418aa9099d1565f1743c35dd1a65f7f3 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 1 Nov 2024 21:28:12 -0300 Subject: [PATCH 146/146] Re-trigger CI --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4ecfe2ab..181fa4af 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,7 @@ on: jobs: build: - name: CI + name: Build runs-on: ubuntu-latest services: redis: