diff --git a/.eslintrc b/.eslintrc index 365fc121..be7b42d7 100644 --- a/.eslintrc +++ b/.eslintrc @@ -5,6 +5,7 @@ "parser": "@typescript-eslint/parser", "plugins": [ "@typescript-eslint", + "eslint-plugin-tsdoc", "import" ], @@ -80,7 +81,7 @@ "message": "Don't declare const enum, because it is not supported by Babel used for building RN SDK" } ], - "compat/compat": ["error", "defaults, ie 10, node 6"], + "compat/compat": ["error", "defaults, node >=14"], "no-throw-literal": "error", "import/no-default-export": "error", "import/no-self-import": "error" @@ -90,6 +91,19 @@ "sourceType": "module" } }, + { + "files": ["types/**"], + "rules": { + "no-use-before-define": "off" + } + }, + { + // Enable TSDoc rules for TypeScript files, allowing the use of JSDoc in JS files. + "files": ["**/*.ts"], + "rules": { + "tsdoc/syntax": "warn" + } + }, // @TODO remove when moving InLocalStorage to js-browser { "files": ["src/storages/inLocalStorage/**/*.ts"], diff --git a/.gitignore b/.gitignore index b09085de..34d8005c 100644 --- a/.gitignore +++ b/.gitignore @@ -9,7 +9,6 @@ ## transpiled code /esm /cjs -/types ## coverage info /coverage diff --git a/CHANGES.txt b/CHANGES.txt index 5fff047b..b615364a 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,19 @@ +2.0.0 (November 1, 2024) + - Added support for targeting rules based on large segments. + - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. + - Added `SplitIO` namespace with the public TypeScript definitions to be reused by the SDKs. + - Updated the handling of timers and async operations inside an `init` factory method to enable lazy initialization of the SDK in standalone mode. This update is intended for the React SDK. + - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. 
+ - BREAKING CHANGES: + - Updated default flag spec version to 1.2, which requires Split Proxy v5.9.0 or higher. + - Removed `/mySegments` endpoint from SplitAPI module, as it is replaced by `/memberships` endpoint. + - Removed support for MY_SEGMENTS_UPDATE and MY_SEGMENTS_UPDATE_V2 notification types, as they are replaced by MEMBERSHIPS_MS_UPDATE and MEMBERSHIPS_LS_UPDATE notification types. + - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. + - Removed the migration logic for the old format of MySegments keys in LocalStorage introduced in JavaScript SDK v10.17.3. + - Removed the `sdkClientMethodCSWithTT` function, which handled the logic to bind an optional traffic type to SDK clients. Client-side SDK implementations must use `sdkClientMethodCS` module, which, unlike the previous function, does not allow passing a traffic type but simplifies the SDK API. + - Removed internal ponyfills for `Map` and `Set` global objects, dropping support for IE and other outdated browsers. The SDK now requires the runtime environment to support these features natively or to provide a polyfill. + - Removed the `sync.localhostMode` configuration option to plug the LocalhostMode module. + 1.17.0 (September 6, 2024) - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. - Added `isTimedout` and `lastUpdate` properties to IStatusInterface to keep track of the timestamp of the last SDK event, used on React and Redux SDKs. 
diff --git a/package-lock.json b/package-lock.json index af57cfa8..40fecca7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,14 +1,15 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.0", + "version": "2.0.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "1.17.0", + "version": "2.0.0", "license": "Apache-2.0", "dependencies": { + "@types/ioredis": "^4.28.0", "tslib": "^2.3.1" }, "devDependencies": { @@ -20,8 +21,9 @@ "@typescript-eslint/parser": "^6.6.0", "cross-env": "^7.0.2", "eslint": "^8.48.0", - "eslint-plugin-compat": "^4.2.0", + "eslint-plugin-compat": "^6.0.1", "eslint-plugin-import": "^2.25.3", + "eslint-plugin-tsdoc": "^0.3.0", "fetch-mock": "^9.11.0", "ioredis": "^4.28.0", "jest": "^27.2.3", @@ -493,12 +495,12 @@ } }, "node_modules/@babel/runtime": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.2.tgz", - "integrity": "sha512-hzeyJyMA1YGdJTuWU0e/j4wKXrU4OMFvY2MSlaI9B7VQb0r5cxTE3EAIS2Q7Tn2RIcDkRvTA/v2JsAEhxe99uw==", + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", + "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", "dev": true, "dependencies": { - "regenerator-runtime": "^0.13.4" + "regenerator-runtime": "^0.14.0" }, "engines": { "node": ">=6.9.0" @@ -1363,9 +1365,49 @@ "dev": true }, "node_modules/@mdn/browser-compat-data": { - "version": "5.3.14", - "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.3.14.tgz", - "integrity": "sha512-Y9XQrphVcE6u9xMm+gIqN86opbU/5s2W1pdPyKRyFV5B7+2jWM2gLI5JpfhZncaoDKvhy6FYwK04aCz5UM/bTQ==", + "version": "5.6.4", + "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.6.4.tgz", + "integrity": "sha512-bOOF4GGzn0exmvNHpSWmTfOXB9beTpIFCm2KPY2UVoCdn1YVfr8heuHr1C++BYI9Tun8REgi5TNVdKbBs249CA==", + 
"dev": true + }, + "node_modules/@microsoft/tsdoc": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.0.tgz", + "integrity": "sha512-HZpPoABogPvjeJOdzCOSJsXeL/SMCBgBZMVC3X3d7YYp2gf31MfxhUoYUNwf1ERPJOnQc0wkFn9trqI6ZEdZuA==", + "dev": true + }, + "node_modules/@microsoft/tsdoc-config": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc-config/-/tsdoc-config-0.17.0.tgz", + "integrity": "sha512-v/EYRXnCAIHxOHW+Plb6OWuUoMotxTN0GLatnpOb1xq0KuTNw/WI3pamJx/UbsoJP5k9MCw1QxvvhPcF9pH3Zg==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.15.0", + "ajv": "~8.12.0", + "jju": "~1.4.0", + "resolve": "~1.22.2" + } + }, + "node_modules/@microsoft/tsdoc-config/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@microsoft/tsdoc-config/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", "dev": true }, "node_modules/@nodelib/fs.scandir": { @@ -2228,9 +2270,9 @@ "dev": true }, "node_modules/browserslist": { - "version": "4.21.10", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz", - "integrity": "sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ==", + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz", + "integrity": 
"sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==", "dev": true, "funding": [ { @@ -2247,10 +2289,10 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001517", - "electron-to-chromium": "^1.4.477", - "node-releases": "^2.0.13", - "update-browserslist-db": "^1.0.11" + "caniuse-lite": "^1.0.30001663", + "electron-to-chromium": "^1.5.28", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" }, "bin": { "browserslist": "cli.js" @@ -2318,9 +2360,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001528", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001528.tgz", - "integrity": "sha512-0Db4yyjR9QMNlsxh+kKWzQtkyflkG/snYheSzkjmvdEtEXB1+jt7A2HmSEiO6XIJPIbo92lHNGNySvE5pZcs5Q==", + "version": "1.0.30001667", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001667.tgz", + "integrity": "sha512-7LTwJjcRkzKFmtqGsibMeuXmvFDfZq/nzIjnmgCGzKKRVzjD72selLDK1oPF/Oxzmt4fNcPvTDvGqSDG4tCALw==", "dev": true, "funding": [ { @@ -2460,10 +2502,9 @@ } }, "node_modules/core-js": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", - "integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==", - "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. 
Please, upgrade your dependencies to the actual version of core-js.", + "version": "3.38.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.38.1.tgz", + "integrity": "sha512-OP35aUorbU3Zvlx7pjsFdu1rGNnD4pgw/CWoYzRY3t2EzoVT7shKHY1dlAy3f41cGIO7ZDPQimhGFTlEYkG/Hw==", "dev": true, "hasInstallScript": true, "funding": { @@ -2679,9 +2720,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.510", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.510.tgz", - "integrity": "sha512-xPfLIPFcN/WLXBpQ/K4UgE98oUBO5Tia6BD4rkSR0wE7ep/PwBVlgvPJQrIBpmJGVAmUzwPKuDbVt9XV6+uC2g==", + "version": "1.5.33", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.33.tgz", + "integrity": "sha512-+cYTcFB1QqD4j4LegwLfpCNxifb6dDFUAwk6RsLusCwIaZI6or2f+q8rs5tTB2YC53HhOlIbEaqHMAAC8IOIwA==", "dev": true }, "node_modules/emittery": { @@ -2763,9 +2804,9 @@ } }, "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true, "engines": { "node": ">=6" @@ -3029,24 +3070,25 @@ } }, "node_modules/eslint-plugin-compat": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-4.2.0.tgz", - "integrity": "sha512-RDKSYD0maWy5r7zb5cWQS+uSPc26mgOzdORJ8hxILmWM7S/Ncwky7BcAtXVY5iRbKjBdHsWU8Yg7hfoZjtkv7w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-6.0.1.tgz", + "integrity": "sha512-0MeIEuoy8kWkOhW38kK8hU4vkb6l/VvyjpuYDymYOXmUY9NvTgyErF16lYuX+HPS5hkmym7lfA+XpYZiWYWmYA==", "dev": true, "dependencies": { - 
"@mdn/browser-compat-data": "^5.3.13", + "@mdn/browser-compat-data": "^5.5.35", "ast-metadata-inferer": "^0.8.0", - "browserslist": "^4.21.10", - "caniuse-lite": "^1.0.30001524", + "browserslist": "^4.23.1", + "caniuse-lite": "^1.0.30001639", "find-up": "^5.0.0", + "globals": "^15.7.0", "lodash.memoize": "^4.1.2", - "semver": "^7.5.4" + "semver": "^7.6.2" }, "engines": { - "node": ">=14.x" + "node": ">=18.x" }, "peerDependencies": { - "eslint": "^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0" + "eslint": "^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0" } }, "node_modules/eslint-plugin-compat/node_modules/find-up": { @@ -3065,6 +3107,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/eslint-plugin-compat/node_modules/globals": { + "version": "15.10.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-15.10.0.tgz", + "integrity": "sha512-tqFIbz83w4Y5TCbtgjZjApohbuh7K9BxGYFm7ifwDR240tvdb7P9x+/9VvUKlmkPoiknoJtanI8UOrqxS3a7lQ==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint-plugin-compat/node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -3111,13 +3165,10 @@ } }, "node_modules/eslint-plugin-compat/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -3173,24 +3224,22 @@ "node": ">=0.10.0" } }, - "node_modules/eslint-plugin-import/node_modules/is-core-module": { - 
"version": "2.8.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz", - "integrity": "sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==", - "dev": true, - "dependencies": { - "has": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/eslint-plugin-import/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true }, + "node_modules/eslint-plugin-tsdoc": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-tsdoc/-/eslint-plugin-tsdoc-0.3.0.tgz", + "integrity": "sha512-0MuFdBrrJVBjT/gyhkP2BqpD0np1NxNLfQ38xXDlSs/KVVpKI2A6vN7jx2Rve/CyUsvOsMGwp9KKrinv7q9g3A==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.15.0", + "@microsoft/tsdoc-config": "0.17.0" + } + }, "node_modules/eslint-scope": { "version": "7.2.2", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", @@ -3638,7 +3687,7 @@ "node_modules/fetch-mock/node_modules/tr46": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", - "integrity": "sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk=", + "integrity": "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==", "dev": true, "dependencies": { "punycode": "^2.1.0" @@ -3752,10 +3801,13 @@ } }, "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } }, "node_modules/gensync": { "version": "1.0.0-beta.2", @@ -3962,6 +4014,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/html-encoding-sniffer": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", @@ -4193,12 +4257,15 @@ } }, "node_modules/is-core-module": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.7.0.tgz", - "integrity": "sha512-ByY+tjCciCr+9nLryBYcSD50EOGWt95c7tIsKTG1J2ixKKXPvF7Ej3AVd+UfDydAJom3biBGDBALaO79ktwgEQ==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", "dev": true, "dependencies": { - "has": "^1.0.3" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6189,6 +6256,12 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, + "node_modules/jju": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", + "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", + "dev": true + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -6384,7 +6457,7 @@ "node_modules/lodash.isequal": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", - "integrity": 
"sha1-QVxEePK8wwEgwizhDtMib30+GOA=", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", "dev": true }, "node_modules/lodash.memoize": { @@ -6402,7 +6475,7 @@ "node_modules/lodash.sortby": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", + "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", "dev": true }, "node_modules/lru-cache": { @@ -6584,9 +6657,9 @@ "dev": true }, "node_modules/node-releases": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", - "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", "dev": true }, "node_modules/normalize-path": { @@ -6840,9 +6913,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, "node_modules/picomatch": { @@ -7032,9 +7105,9 @@ } }, "node_modules/regenerator-runtime": { - "version": "0.13.9", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", - "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "version": "0.14.1", + "resolved": 
"https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", "dev": true }, "node_modules/require-directory": { @@ -7046,6 +7119,15 @@ "node": ">=0.10.0" } }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", @@ -7053,13 +7135,17 @@ "dev": true }, "node_modules/resolve": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", - "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", "dev": true, "dependencies": { - "is-core-module": "^2.2.0", - "path-parse": "^1.0.6" + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -7425,6 +7511,18 @@ "node": ">=8" } }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, "node_modules/symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", @@ -7715,9 +7813,9 @@ } }, "node_modules/update-browserslist-db": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "dev": true, "funding": [ { @@ -7734,8 +7832,8 @@ } ], "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" }, "bin": { "update-browserslist-db": "cli.js" @@ -8384,12 +8482,12 @@ } }, "@babel/runtime": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.2.tgz", - "integrity": "sha512-hzeyJyMA1YGdJTuWU0e/j4wKXrU4OMFvY2MSlaI9B7VQb0r5cxTE3EAIS2Q7Tn2RIcDkRvTA/v2JsAEhxe99uw==", + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", + "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", "dev": true, "requires": { - "regenerator-runtime": "^0.13.4" + "regenerator-runtime": "^0.14.0" } }, "@babel/template": { @@ -9044,11 +9142,49 @@ } }, "@mdn/browser-compat-data": { - "version": "5.3.14", - "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.3.14.tgz", - "integrity": "sha512-Y9XQrphVcE6u9xMm+gIqN86opbU/5s2W1pdPyKRyFV5B7+2jWM2gLI5JpfhZncaoDKvhy6FYwK04aCz5UM/bTQ==", + "version": "5.6.4", + "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.6.4.tgz", + "integrity": 
"sha512-bOOF4GGzn0exmvNHpSWmTfOXB9beTpIFCm2KPY2UVoCdn1YVfr8heuHr1C++BYI9Tun8REgi5TNVdKbBs249CA==", + "dev": true + }, + "@microsoft/tsdoc": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.0.tgz", + "integrity": "sha512-HZpPoABogPvjeJOdzCOSJsXeL/SMCBgBZMVC3X3d7YYp2gf31MfxhUoYUNwf1ERPJOnQc0wkFn9trqI6ZEdZuA==", "dev": true }, + "@microsoft/tsdoc-config": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc-config/-/tsdoc-config-0.17.0.tgz", + "integrity": "sha512-v/EYRXnCAIHxOHW+Plb6OWuUoMotxTN0GLatnpOb1xq0KuTNw/WI3pamJx/UbsoJP5k9MCw1QxvvhPcF9pH3Zg==", + "dev": true, + "requires": { + "@microsoft/tsdoc": "0.15.0", + "ajv": "~8.12.0", + "jju": "~1.4.0", + "resolve": "~1.22.2" + }, + "dependencies": { + "ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + }, + "json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + } + } + }, "@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -9699,15 +9835,15 @@ "dev": true }, "browserslist": { - "version": "4.21.10", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz", - "integrity": "sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ==", + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz", + "integrity": 
"sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==", "dev": true, "requires": { - "caniuse-lite": "^1.0.30001517", - "electron-to-chromium": "^1.4.477", - "node-releases": "^2.0.13", - "update-browserslist-db": "^1.0.11" + "caniuse-lite": "^1.0.30001663", + "electron-to-chromium": "^1.5.28", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" } }, "bs-logger": { @@ -9757,9 +9893,9 @@ "dev": true }, "caniuse-lite": { - "version": "1.0.30001528", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001528.tgz", - "integrity": "sha512-0Db4yyjR9QMNlsxh+kKWzQtkyflkG/snYheSzkjmvdEtEXB1+jt7A2HmSEiO6XIJPIbo92lHNGNySvE5pZcs5Q==", + "version": "1.0.30001667", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001667.tgz", + "integrity": "sha512-7LTwJjcRkzKFmtqGsibMeuXmvFDfZq/nzIjnmgCGzKKRVzjD72selLDK1oPF/Oxzmt4fNcPvTDvGqSDG4tCALw==", "dev": true }, "chalk": { @@ -9860,9 +9996,9 @@ } }, "core-js": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", - "integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==", + "version": "3.38.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.38.1.tgz", + "integrity": "sha512-OP35aUorbU3Zvlx7pjsFdu1rGNnD4pgw/CWoYzRY3t2EzoVT7shKHY1dlAy3f41cGIO7ZDPQimhGFTlEYkG/Hw==", "dev": true }, "cross-env": { @@ -10021,9 +10157,9 @@ } }, "electron-to-chromium": { - "version": "1.4.510", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.510.tgz", - "integrity": "sha512-xPfLIPFcN/WLXBpQ/K4UgE98oUBO5Tia6BD4rkSR0wE7ep/PwBVlgvPJQrIBpmJGVAmUzwPKuDbVt9XV6+uC2g==", + "version": "1.5.33", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.33.tgz", + "integrity": "sha512-+cYTcFB1QqD4j4LegwLfpCNxifb6dDFUAwk6RsLusCwIaZI6or2f+q8rs5tTB2YC53HhOlIbEaqHMAAC8IOIwA==", "dev": true 
}, "emittery": { @@ -10087,9 +10223,9 @@ } }, "escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true }, "escape-string-regexp": { @@ -10423,18 +10559,19 @@ } }, "eslint-plugin-compat": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-4.2.0.tgz", - "integrity": "sha512-RDKSYD0maWy5r7zb5cWQS+uSPc26mgOzdORJ8hxILmWM7S/Ncwky7BcAtXVY5iRbKjBdHsWU8Yg7hfoZjtkv7w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-6.0.1.tgz", + "integrity": "sha512-0MeIEuoy8kWkOhW38kK8hU4vkb6l/VvyjpuYDymYOXmUY9NvTgyErF16lYuX+HPS5hkmym7lfA+XpYZiWYWmYA==", "dev": true, "requires": { - "@mdn/browser-compat-data": "^5.3.13", + "@mdn/browser-compat-data": "^5.5.35", "ast-metadata-inferer": "^0.8.0", - "browserslist": "^4.21.10", - "caniuse-lite": "^1.0.30001524", + "browserslist": "^4.23.1", + "caniuse-lite": "^1.0.30001639", "find-up": "^5.0.0", + "globals": "^15.7.0", "lodash.memoize": "^4.1.2", - "semver": "^7.5.4" + "semver": "^7.6.2" }, "dependencies": { "find-up": { @@ -10447,6 +10584,12 @@ "path-exists": "^4.0.0" } }, + "globals": { + "version": "15.10.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-15.10.0.tgz", + "integrity": "sha512-tqFIbz83w4Y5TCbtgjZjApohbuh7K9BxGYFm7ifwDR240tvdb7P9x+/9VvUKlmkPoiknoJtanI8UOrqxS3a7lQ==", + "dev": true + }, "locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -10475,13 +10618,10 @@ } }, "semver": { - "version": "7.5.4", - "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true } } }, @@ -10524,15 +10664,6 @@ "esutils": "^2.0.2" } }, - "is-core-module": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz", - "integrity": "sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==", - "dev": true, - "requires": { - "has": "^1.0.3" - } - }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -10541,6 +10672,16 @@ } } }, + "eslint-plugin-tsdoc": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-tsdoc/-/eslint-plugin-tsdoc-0.3.0.tgz", + "integrity": "sha512-0MuFdBrrJVBjT/gyhkP2BqpD0np1NxNLfQ38xXDlSs/KVVpKI2A6vN7jx2Rve/CyUsvOsMGwp9KKrinv7q9g3A==", + "dev": true, + "requires": { + "@microsoft/tsdoc": "0.15.0", + "@microsoft/tsdoc-config": "0.17.0" + } + }, "eslint-scope": { "version": "7.2.2", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", @@ -10728,7 +10869,7 @@ "tr46": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", - "integrity": "sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk=", + "integrity": "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==", "dev": true, "requires": { "punycode": "^2.1.0" @@ -10822,9 +10963,9 @@ "optional": true }, "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": 
"sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", "dev": true }, "gensync": { @@ -10969,6 +11110,15 @@ "has-symbols": "^1.0.2" } }, + "hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "requires": { + "function-bind": "^1.1.2" + } + }, "html-encoding-sniffer": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", @@ -11138,12 +11288,12 @@ "dev": true }, "is-core-module": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.7.0.tgz", - "integrity": "sha512-ByY+tjCciCr+9nLryBYcSD50EOGWt95c7tIsKTG1J2ixKKXPvF7Ej3AVd+UfDydAJom3biBGDBALaO79ktwgEQ==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", "dev": true, "requires": { - "has": "^1.0.3" + "hasown": "^2.0.2" } }, "is-date-object": { @@ -12613,6 +12763,12 @@ } } }, + "jju": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", + "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", + "dev": true + }, "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -12766,7 +12922,7 @@ "lodash.isequal": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", - "integrity": 
"sha1-QVxEePK8wwEgwizhDtMib30+GOA=", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", "dev": true }, "lodash.memoize": { @@ -12784,7 +12940,7 @@ "lodash.sortby": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", + "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", "dev": true }, "lru-cache": { @@ -12930,9 +13086,9 @@ "dev": true }, "node-releases": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", - "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", "dev": true }, "normalize-path": { @@ -13117,9 +13273,9 @@ "dev": true }, "picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, "picomatch": { @@ -13251,9 +13407,9 @@ } }, "regenerator-runtime": { - "version": "0.13.9", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", - "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": 
"sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", "dev": true }, "require-directory": { @@ -13262,6 +13418,12 @@ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, "requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", @@ -13269,13 +13431,14 @@ "dev": true }, "resolve": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", - "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", "dev": true, "requires": { - "is-core-module": "^2.2.0", - "path-parse": "^1.0.6" + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" } }, "resolve-cwd": { @@ -13543,6 +13706,12 @@ } } }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, "symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", @@ -13747,13 +13916,13 @@ "dev": true }, "update-browserslist-db": { - "version": "1.0.11", - "resolved": 
"https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "dev": true, "requires": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" } }, "uri-js": { diff --git a/package.json b/package.json index 2cd3e530..608c3982 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "1.17.0", + "version": "2.0.0", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", @@ -17,10 +17,10 @@ ], "scripts": { "check": "npm run check:lint && npm run check:types", - "check:lint": "eslint src --ext .js,.ts", + "check:lint": "eslint src types --ext .js,.ts", "check:types": "tsc --noEmit", "build": "npm run build:cjs && npm run build:esm", - "build:esm": "rimraf esm && tsc -m es2015 --outDir esm -d true --declarationDir types", + "build:esm": "rimraf esm && tsc -m es2015 --outDir esm", "build:cjs": "rimraf cjs && tsc -m CommonJS --outDir cjs", "test": "jest", "test:coverage": "jest --coverage", @@ -45,6 +45,7 @@ "bugs": "https://github.com/splitio/javascript-commons/issues", "homepage": "https://github.com/splitio/javascript-commons#readme", "dependencies": { + "@types/ioredis": "^4.28.0", "tslib": "^2.3.1" }, "peerDependencies": { @@ -64,8 +65,9 @@ "@typescript-eslint/parser": "^6.6.0", "cross-env": "^7.0.2", "eslint": "^8.48.0", - "eslint-plugin-compat": "^4.2.0", + "eslint-plugin-compat": "^6.0.1", "eslint-plugin-import": "^2.25.3", + "eslint-plugin-tsdoc": "^0.3.0", "fetch-mock": "^9.11.0", "ioredis": "^4.28.0", "jest": "^27.2.3", diff --git 
a/src/__tests__/mocks/fetchSpecificSplits.ts b/src/__tests__/mocks/fetchSpecificSplits.ts index 4b2f4c90..3fd16dfe 100644 --- a/src/__tests__/mocks/fetchSpecificSplits.ts +++ b/src/__tests__/mocks/fetchSpecificSplits.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; const valuesExamples = [ ['\u0223abc', 'abc\u0223asd', 'abc\u0223', 'abcȣ'], diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 00000000..5a59b352 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_LS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"employees\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.1457552650000.json new file mode 100644 index 00000000..e9b07c7a --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_LS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json new file mode 100644 index 00000000..cd1a0736 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": 
"{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552651000,\\\"l\\\":[],\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json new file mode 100644 index 00000000..7f553dad --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552652000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 00000000..d13e0f53 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"l\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json new file mode 100644 index 00000000..4505b73e --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"l\\\":[],\\\"c\\\": 
0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json deleted file mode 100644 index 951d8a31..00000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552640900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_NTcwOTc3MDQx_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552640000,\\\"includesPayload\\\":false}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json deleted file mode 100644 index 97c2a73c..00000000 --- a/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552651000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json deleted file mode 100644 index c44ee3ac..00000000 --- a/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552652000,\\\"segmentName\\\":\\\"splitters\\\",\\\"c\\\": 1,\\\"u\\\": 
2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json deleted file mode 100644 index aaf1a3f3..00000000 --- a/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552653000,\\\"segmentName\\\":\\\"splitters\\\",\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json deleted file mode 100644 index a7a2e793..00000000 --- a/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552650000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/testUtils/eventSourceMock.ts b/src/__tests__/testUtils/eventSourceMock.ts index f47615ed..c231aa9f 100644 --- a/src/__tests__/testUtils/eventSourceMock.ts +++ b/src/__tests__/testUtils/eventSourceMock.ts @@ -13,7 +13,7 @@ */ import { EventEmitter } from '../../utils/MinEvents'; -import { IEventEmitter } from '../../types'; +import SplitIO from '../../../types/splitio'; type ReadyStateType = 0 | 1 | 2; @@ -46,7 +46,7 @@ export default class EventSource { static readonly OPEN: ReadyStateType = 1; static readonly CLOSED: ReadyStateType = 2; - private readonly __emitter: IEventEmitter; + private readonly __emitter: SplitIO.IEventEmitter; private readonly __eventSourceInitDict: EventSourceInitDict; onerror?: (evt: 
MessageEvent) => any; onmessage?: (evt: MessageEvent) => any; diff --git a/src/__tests__/testUtils/fetchMock.ts b/src/__tests__/testUtils/fetchMock.ts index 94a614f7..780aa231 100644 --- a/src/__tests__/testUtils/fetchMock.ts +++ b/src/__tests__/testUtils/fetchMock.ts @@ -1,4 +1,4 @@ -// http://www.wheresrhys.co.uk/fetch-mock/#usageinstallation +// @TODO upgrade fetch-mock to fix vulnerabilities import fetchMockLib from 'fetch-mock'; const fetchMock = fetchMockLib.sandbox(); diff --git a/src/__tests__/testUtils/index.ts b/src/__tests__/testUtils/index.ts index 5c3db2f7..9824f0cd 100644 --- a/src/__tests__/testUtils/index.ts +++ b/src/__tests__/testUtils/index.ts @@ -3,10 +3,10 @@ const DEFAULT_ERROR_MARGIN = 75; // 0.075 secs if numbers are timestamps in mill /** * Assert if an `actual` and `expected` numeric values are nearly equal. * - * @param {number} actual actual time lapse in millis - * @param {number} expected expected time lapse in millis - * @param {number} epsilon error margin in millis - * @returns {boolean} whether the absolute difference is minor to epsilon value or not + * @param actual - actual time lapse in millis + * @param expected - expected time lapse in millis + * @param epsilon - error margin in millis + * @returns whether the absolute difference is minor to epsilon value or not */ export function nearlyEqual(actual: number, expected: number, epsilon = DEFAULT_ERROR_MARGIN) { const diff = Math.abs(actual - expected); diff --git a/src/dtos/types.ts b/src/dtos/types.ts index efbf0acc..dce1d12d 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; export type MaybeThenable = T | Promise @@ -30,6 +30,10 @@ export interface IInSegmentMatcherData { segmentName: string } +export interface IInLargeSegmentMatcherData { + largeSegmentName: string +} + export interface IDependencyMatcherData { split: string, treatments: string[] @@ -43,6 +47,7 @@ interface 
ISplitMatcherBase { attribute: string | null } userDefinedSegmentMatcherData?: null | IInSegmentMatcherData + userDefinedLargeSegmentMatcherData?: null | IInLargeSegmentMatcherData whitelistMatcherData?: null | IWhitelistMatcherData unaryNumericMatcherData?: null | IUnaryNumericMatcherData betweenMatcherData?: null | IBetweenMatcherData @@ -61,6 +66,11 @@ interface IInSegmentMatcher extends ISplitMatcherBase { userDefinedSegmentMatcherData: IInSegmentMatcherData } +interface IInLargeSegmentMatcher extends ISplitMatcherBase { + matcherType: 'IN_LARGE_SEGMENT', + userDefinedLargeSegmentMatcherData: IInLargeSegmentMatcherData +} + interface IWhitelistMatcher extends ISplitMatcherBase { matcherType: 'WHITELIST', whitelistMatcherData: IWhitelistMatcherData @@ -165,7 +175,8 @@ interface IInListSemverMatcher extends ISplitMatcherBase { export type ISplitMatcher = IAllKeysMatcher | IInSegmentMatcher | IWhitelistMatcher | IEqualToMatcher | IGreaterThanOrEqualToMatcher | ILessThanOrEqualToMatcher | IBetweenMatcher | IEqualToSetMatcher | IContainsAnyOfSetMatcher | IContainsAllOfSetMatcher | IPartOfSetMatcher | IStartsWithMatcher | IEndsWithMatcher | IContainsStringMatcher | IInSplitTreatmentMatcher | IEqualToBooleanMatcher | IMatchesStringMatcher | - IEqualToSemverMatcher | IGreaterThanOrEqualToSemverMatcher | ILessThanOrEqualToSemverMatcher | IBetweenSemverMatcher | IInListSemverMatcher + IEqualToSemverMatcher | IGreaterThanOrEqualToSemverMatcher | ILessThanOrEqualToSemverMatcher | IBetweenSemverMatcher | IInListSemverMatcher | + IInLargeSegmentMatcher /** Split object */ export interface ISplitPartition { @@ -218,14 +229,17 @@ export interface ISegmentChangesResponse { till: number } -export interface IMySegmentsResponseItem { - id: string, - name: string +export interface IMySegmentsResponse { + cn?: number, + k?: { + n: string + }[] } -/** Interface of the parsed JSON response of `/mySegments/{userKey}` */ -export interface IMySegmentsResponse { - mySegments: 
IMySegmentsResponseItem[] +/** Interface of the parsed JSON response of `/memberships/{userKey}` */ +export interface IMembershipsResponse { + ms?: IMySegmentsResponse, + ls?: IMySegmentsResponse } /** Metadata internal type for storages */ diff --git a/src/evaluator/Engine.ts b/src/evaluator/Engine.ts index e3b38975..36f52cb4 100644 --- a/src/evaluator/Engine.ts +++ b/src/evaluator/Engine.ts @@ -5,7 +5,7 @@ import { thenable } from '../utils/promise/thenable'; import { EXCEPTION, NO_CONDITION_MATCH, SPLIT_ARCHIVED, SPLIT_KILLED } from '../utils/labels'; import { CONTROL } from '../utils/constants'; import { ISplit, MaybeThenable } from '../dtos/types'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { IStorageAsync, IStorageSync } from '../storages/types'; import { IEvaluation, IEvaluationResult, IEvaluator, ISplitEvaluator } from './types'; import { ILogger } from '../logger/types'; @@ -21,7 +21,7 @@ export class Engine { constructor(private baseInfo: ISplit, private evaluator: IEvaluator) { - // in case we don't have a default treatment in the instanciation, use 'control' + // in case we don't have a default treatment in the instantiation, use 'control' if (typeof this.baseInfo.defaultTreatment !== 'string') { this.baseInfo.defaultTreatment = CONTROL; } diff --git a/src/evaluator/__tests__/evaluate-features.spec.ts b/src/evaluator/__tests__/evaluate-features.spec.ts index 45431c64..761f2804 100644 --- a/src/evaluator/__tests__/evaluate-features.spec.ts +++ b/src/evaluator/__tests__/evaluate-features.spec.ts @@ -2,7 +2,6 @@ import { evaluateFeatures, evaluateFeaturesByFlagSets } from '../index'; import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, SPLIT_NOT_FOUND } from '../../utils/labels'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; -import { _Set } from '../../utils/lang/sets'; import { WARN_FLAGSET_WITHOUT_FLAGS } from '../../logger/constants'; const splitsMock = { @@ -17,8 +16,8 @@ 
const splitsMock = { }; const flagSetsMock = { - reg_and_config: new _Set(['regular', 'config']), - arch_and_killed: new _Set(['killed', 'archived']), + reg_and_config: new Set(['regular', 'config']), + arch_and_killed: new Set(['killed', 'archived']), }; const mockStorage = { @@ -38,7 +37,7 @@ const mockStorage = { return splits; }, getNamesByFlagSets(flagSets) { - return flagSets.map(flagset => flagSetsMock[flagset] || new _Set()); + return flagSets.map(flagset => flagSetsMock[flagset] || new Set()); } } }; @@ -192,7 +191,7 @@ describe('EVALUATOR - Multiple evaluations at once by flag sets', () => { // Should support async storage too expect(await getResultsByFlagsets(['inexistent_set1', 'inexistent_set2'], { splits: { - getNamesByFlagSets(flagSets) { return Promise.resolve(flagSets.map(flagset => flagSetsMock[flagset] || new _Set())); } + getNamesByFlagSets(flagSets) { return Promise.resolve(flagSets.map(flagset => flagSetsMock[flagset] || new Set())); } } })).toEqual({}); expect(loggerMock.warn.mock.calls).toEqual([ diff --git a/src/evaluator/combiners/ifelseif.ts b/src/evaluator/combiners/ifelseif.ts index c96df683..68fe5725 100644 --- a/src/evaluator/combiners/ifelseif.ts +++ b/src/evaluator/combiners/ifelseif.ts @@ -3,7 +3,7 @@ import { ILogger } from '../../logger/types'; import { thenable } from '../../utils/promise/thenable'; import { UNSUPPORTED_MATCHER_TYPE } from '../../utils/labels'; import { CONTROL } from '../../utils/constants'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IEvaluation, IEvaluator, ISplitEvaluator } from '../types'; import { ENGINE_COMBINER_IFELSEIF, ENGINE_COMBINER_IFELSEIF_NO_TREATMENT, ERROR_ENGINE_COMBINER_IFELSEIF } from '../../logger/constants'; diff --git a/src/evaluator/condition/index.ts b/src/evaluator/condition/index.ts index 64b42e5f..7ffaef79 100644 --- a/src/evaluator/condition/index.ts +++ b/src/evaluator/condition/index.ts @@ -3,7 +3,7 @@ import { thenable } from 
'../../utils/promise/thenable'; import { NOT_IN_SPLIT } from '../../utils/labels'; import { MaybeThenable } from '../../dtos/types'; import { IEvaluation, IEvaluator, ISplitEvaluator } from '../types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; // Build Evaluation object if and only if matchingResult is true diff --git a/src/evaluator/index.ts b/src/evaluator/index.ts index 73527d42..c0576019 100644 --- a/src/evaluator/index.ts +++ b/src/evaluator/index.ts @@ -5,9 +5,9 @@ import { CONTROL } from '../utils/constants'; import { ISplit, MaybeThenable } from '../dtos/types'; import { IStorageAsync, IStorageSync } from '../storages/types'; import { IEvaluationResult } from './types'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { ILogger } from '../logger/types'; -import { ISet, setToArray, returnSetsUnion, _Set } from '../utils/lang/sets'; +import { returnSetsUnion, setToArray } from '../utils/lang/sets'; import { WARN_FLAGSET_WITHOUT_FLAGS } from '../logger/constants'; const treatmentException = { @@ -97,12 +97,12 @@ export function evaluateFeaturesByFlagSets( storage: IStorageSync | IStorageAsync, method: string, ): MaybeThenable> { - let storedFlagNames: MaybeThenable[]>; + let storedFlagNames: MaybeThenable[]>; function evaluate( - featureFlagsByFlagSets: ISet[], + featureFlagsByFlagSets: Set[], ) { - let featureFlags = new _Set(); + let featureFlags = new Set(); for (let i = 0; i < flagSets.length; i++) { const featureFlagByFlagSet = featureFlagsByFlagSets[i]; if (featureFlagByFlagSet.size) { diff --git a/src/evaluator/matchers/__tests__/segment/client_side.spec.ts b/src/evaluator/matchers/__tests__/segment/client_side.spec.ts index 3bace2ca..5e192829 100644 --- a/src/evaluator/matchers/__tests__/segment/client_side.spec.ts +++ b/src/evaluator/matchers/__tests__/segment/client_side.spec.ts @@ -32,3 +32,28 @@ test('MATCHER IN_SEGMENT 
/ should return true ONLY when the segment is defined i expect(await matcherTrue()).toBe(true); // segment found in mySegments list expect(await matcherFalse()).toBe(false); // segment not found in mySegments list }); + +test('MATCHER IN_LARGE_SEGMENT / should return true ONLY when the segment is defined inside the segment storage', async function () { + const segment = 'employees'; + + const matcherTrue = matcherFactory(loggerMock, { + type: matcherTypes.IN_LARGE_SEGMENT, + value: segment + } as IMatcherDto, { + largeSegments: { + isInSegment(segmentName) { + return segment === segmentName; + } + } + } as IStorageSync) as IMatcher; + + const matcherFalse = matcherFactory(loggerMock, { + type: matcherTypes.IN_LARGE_SEGMENT, + value: segment + } as IMatcherDto, { + largeSegments: undefined + } as IStorageSync) as IMatcher; + + expect(await matcherTrue()).toBe(true); // large segment found in mySegments list + expect(await matcherFalse()).toBe(false); // large segment storage is not defined +}); diff --git a/src/evaluator/matchers/index.ts b/src/evaluator/matchers/index.ts index b110fc17..d50c38dd 100644 --- a/src/evaluator/matchers/index.ts +++ b/src/evaluator/matchers/index.ts @@ -1,5 +1,6 @@ import { allMatcherContext } from './all'; import { segmentMatcherContext } from './segment'; +import { largeSegmentMatcherContext } from './large_segment'; import { whitelistMatcherContext } from './whitelist'; import { equalToMatcherContext } from './eq'; import { greaterThanEqualMatcherContext } from './gte'; @@ -48,6 +49,7 @@ const matchers = [ lessThanEqualToSemverMatcherContext, // LESS_THAN_OR_EQUAL_TO_SEMVER: 20 betweenSemverMatcherContext, // BETWEEN_SEMVER: 21 inListSemverMatcherContext, // IN_LIST_SEMVER: 22 + largeSegmentMatcherContext, // IN_LARGE_SEGMENT: 23 ]; /** diff --git a/src/evaluator/matchers/large_segment.ts b/src/evaluator/matchers/large_segment.ts new file mode 100644 index 00000000..408fd5da --- /dev/null +++ b/src/evaluator/matchers/large_segment.ts 
@@ -0,0 +1,18 @@ +import { MaybeThenable } from '../../dtos/types'; +import { ISegmentsCacheBase } from '../../storages/types'; +import { thenable } from '../../utils/promise/thenable'; + +export function largeSegmentMatcherContext(largeSegmentName: string, storage: { largeSegments?: ISegmentsCacheBase }) { + + return function largeSegmentMatcher(key: string): MaybeThenable { + const isInLargeSegment = storage.largeSegments ? storage.largeSegments.isInSegment(largeSegmentName, key) : false; + + if (thenable(isInLargeSegment)) { + isInLargeSegment.then(result => { + return result; + }); + } + + return isInLargeSegment; + }; +} diff --git a/src/evaluator/matchers/matcherTypes.ts b/src/evaluator/matchers/matcherTypes.ts index 469c7a43..f09d50bf 100644 --- a/src/evaluator/matchers/matcherTypes.ts +++ b/src/evaluator/matchers/matcherTypes.ts @@ -22,6 +22,7 @@ export const matcherTypes: Record = { LESS_THAN_OR_EQUAL_TO_SEMVER: 20, BETWEEN_SEMVER: 21, IN_LIST_SEMVER: 22, + IN_LARGE_SEGMENT: 23, }; export const matcherDataTypes = { diff --git a/src/evaluator/matchers/semver_inlist.ts b/src/evaluator/matchers/semver_inlist.ts index 6d09f7ba..c21b10d8 100644 --- a/src/evaluator/matchers/semver_inlist.ts +++ b/src/evaluator/matchers/semver_inlist.ts @@ -1,11 +1,10 @@ -import { _Set } from '../../utils/lang/sets'; import { Semver } from '../../utils/Semver'; export function inListSemverMatcherContext(ruleAttr: string[]) { // @TODO ruleAttr validation should be done at the `parser` or `matchersTransform` level to reuse for all matchers if (!ruleAttr || ruleAttr.length === 0) throw new Error('whitelistMatcherData is required for IN_LIST_SEMVER matcher type'); - const listOfSemvers = new _Set(ruleAttr.map((version) => new Semver(version).version)); + const listOfSemvers = new Set(ruleAttr.map((version) => new Semver(version).version)); return function inListSemverMatcher(runtimeAttr: string): boolean { const runtimeSemver = new Semver(runtimeAttr).version; diff --git 
a/src/evaluator/matchers/whitelist.ts b/src/evaluator/matchers/whitelist.ts index 082772ae..309b1540 100644 --- a/src/evaluator/matchers/whitelist.ts +++ b/src/evaluator/matchers/whitelist.ts @@ -1,7 +1,5 @@ -import { _Set } from '../../utils/lang/sets'; - export function whitelistMatcherContext(ruleAttr: string[]) { - const whitelistSet = new _Set(ruleAttr); + const whitelistSet = new Set(ruleAttr); return function whitelistMatcher(runtimeAttr: string): boolean { const isInWhitelist = whitelistSet.has(runtimeAttr); diff --git a/src/evaluator/matchersTransform/index.ts b/src/evaluator/matchersTransform/index.ts index 23c4d538..a5be15e3 100644 --- a/src/evaluator/matchersTransform/index.ts +++ b/src/evaluator/matchersTransform/index.ts @@ -4,7 +4,7 @@ import { segmentTransform } from './segment'; import { whitelistTransform } from './whitelist'; import { numericTransform } from './unaryNumeric'; import { zeroSinceHH, zeroSinceSS } from '../convertions'; -import { IBetweenMatcherData, IInSegmentMatcherData, ISplitMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; +import { IBetweenMatcherData, IInLargeSegmentMatcherData, IInSegmentMatcherData, ISplitMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; import { IMatcherDto } from '../types'; /** @@ -18,6 +18,7 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { negate, keySelector, userDefinedSegmentMatcherData, + userDefinedLargeSegmentMatcherData, whitelistMatcherData, /* whitelistObject, provided by 'WHITELIST', 'IN_LIST_SEMVER', set and string matchers */ unaryNumericMatcherData, betweenMatcherData, @@ -35,6 +36,8 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { if (type === matcherTypes.IN_SEGMENT) { value = segmentTransform(userDefinedSegmentMatcherData as IInSegmentMatcherData); + } else if (type === matcherTypes.IN_LARGE_SEGMENT) { + value = segmentTransform(userDefinedLargeSegmentMatcherData as IInLargeSegmentMatcherData); } else 
if (type === matcherTypes.EQUAL_TO) { value = numericTransform(unaryNumericMatcherData as IUnaryNumericMatcherData); dataType = matcherDataTypes.NUMBER; diff --git a/src/evaluator/matchersTransform/segment.ts b/src/evaluator/matchersTransform/segment.ts index 00674cf2..c53264dc 100644 --- a/src/evaluator/matchersTransform/segment.ts +++ b/src/evaluator/matchersTransform/segment.ts @@ -1,8 +1,10 @@ -import { IInSegmentMatcherData } from '../../dtos/types'; +import { IInSegmentMatcherData, IInLargeSegmentMatcherData } from '../../dtos/types'; /** * Extract segment name as a plain string. */ -export function segmentTransform(segment?: IInSegmentMatcherData) { - return segment ? segment.segmentName : undefined; +export function segmentTransform(segment?: IInSegmentMatcherData | IInLargeSegmentMatcherData) { + return segment ? + (segment as IInSegmentMatcherData).segmentName || (segment as IInLargeSegmentMatcherData).largeSegmentName : + undefined; } diff --git a/src/evaluator/parser/index.ts b/src/evaluator/parser/index.ts index d124ad41..a398aa0b 100644 --- a/src/evaluator/parser/index.ts +++ b/src/evaluator/parser/index.ts @@ -9,7 +9,7 @@ import { thenable } from '../../utils/promise/thenable'; import { IEvaluator, IMatcherDto, ISplitEvaluator } from '../types'; import { ISplitCondition, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { ENGINE_MATCHER_ERROR, ENGINE_MATCHER_RESULT } from '../../logger/constants'; diff --git a/src/evaluator/types.ts b/src/evaluator/types.ts index a34c33c7..92e3446d 100644 --- a/src/evaluator/types.ts +++ b/src/evaluator/types.ts @@ -1,6 +1,6 @@ import { IBetweenMatcherData, IBetweenStringMatcherData, IDependencyMatcherData, MaybeThenable } from '../dtos/types'; import { IStorageAsync, IStorageSync } from '../storages/types'; -import { 
SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { ILogger } from '../logger/types'; export interface IDependencyMatcherValue { diff --git a/src/evaluator/value/index.ts b/src/evaluator/value/index.ts index c564a68f..95b4000c 100644 --- a/src/evaluator/value/index.ts +++ b/src/evaluator/value/index.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IMatcherDto } from '../types'; import { ILogger } from '../../logger/types'; import { sanitize } from './sanitize'; diff --git a/src/evaluator/value/sanitize.ts b/src/evaluator/value/sanitize.ts index d12de8ed..9fbf74f7 100644 --- a/src/evaluator/value/sanitize.ts +++ b/src/evaluator/value/sanitize.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IDependencyMatcherValue } from '../types'; import { ILogger } from '../../logger/types'; import { isObject, uniq, toString, toNumber } from '../../utils/lang'; diff --git a/src/integrations/__tests__/browser.spec.ts b/src/integrations/__tests__/browser.spec.ts deleted file mode 100644 index 40316654..00000000 --- a/src/integrations/__tests__/browser.spec.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { GOOGLE_ANALYTICS_TO_SPLIT, SPLIT_TO_GOOGLE_ANALYTICS } from '../../utils/constants/browser'; -import { SPLIT_IMPRESSION, SPLIT_EVENT } from '../../utils/constants'; -import { IIntegrationManager } from '../types'; -import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; - -// Mock integration modules (GaToSplit and SplitToGa). 
- -jest.mock('../ga/GaToSplit'); -import { GaToSplit as GaToSplitMock } from '../ga/GaToSplit'; -jest.mock('../ga/SplitToGa'); -import { SplitToGa as SplitToGaMock } from '../ga/SplitToGa'; - -const SplitToGaQueueMethod = jest.fn(); -(SplitToGaMock as unknown as jest.Mock).mockImplementation(() => { - return { - queue: SplitToGaQueueMethod - }; -}); - - -const fakeParams = { - storage: 'fakeStorage', - settings: { - core: 'fakeCore', - log: loggerMock - } -}; - -function clearMocks() { - (GaToSplitMock as jest.Mock).mockClear(); - (SplitToGaMock as unknown as jest.Mock).mockClear(); - SplitToGaQueueMethod.mockClear(); -} - -// Test target -import { integrationsManagerFactory as browserIMF } from '../browser'; -import { BrowserIntegration } from '../ga/types'; - -describe('IntegrationsManagerFactory for browser', () => { - - test('API', () => { - expect(typeof browserIMF).toBe('function'); // The module should return a function which acts as a factory. - - // @ts-expect-error - const instance1 = browserIMF([]); - expect(instance1).toBe(undefined); // The instance should be undefined if settings.integrations does not contain integrations that register a listener. - - let integrations: BrowserIntegration[] = [{ type: GOOGLE_ANALYTICS_TO_SPLIT }, { type: SPLIT_TO_GOOGLE_ANALYTICS }]; - const instance2 = browserIMF(integrations, fakeParams as any) as IIntegrationManager; - expect(GaToSplitMock).toBeCalledTimes(1); // GaToSplit invoked once - expect(SplitToGaMock).toBeCalledTimes(1); // SplitToGa invoked once - expect(typeof instance2.handleImpression).toBe('function'); // The instance should implement the handleImpression method if settings.integrations has items that register a listener. - expect(typeof instance2.handleEvent).toBe('function'); // The instance should implement the handleEvent method if settings.integrations has items that register a listener. 
- - clearMocks(); - - integrations = [{ type: GOOGLE_ANALYTICS_TO_SPLIT }, { type: SPLIT_TO_GOOGLE_ANALYTICS }, { type: GOOGLE_ANALYTICS_TO_SPLIT }, { type: SPLIT_TO_GOOGLE_ANALYTICS }, { type: SPLIT_TO_GOOGLE_ANALYTICS }]; - browserIMF(integrations, fakeParams as any); - expect(GaToSplitMock).toBeCalledTimes(2); // GaToSplit invoked twice - expect(SplitToGaMock).toBeCalledTimes(3); // SplitToGa invoked thrice - - clearMocks(); - }); - - test('Interaction with GaToSplit integration module', () => { - const integrations: BrowserIntegration[] = [{ - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - prefix: 'some-prefix' - }]; - browserIMF(integrations, fakeParams as any); - - expect((GaToSplitMock as jest.Mock).mock.calls).toEqual([[integrations[0], fakeParams]]); // Invokes GaToSplit integration module with options, storage and core settings - - clearMocks(); - }); - - test('Interaction with SplitToGa integration module', () => { - const integrations: BrowserIntegration[] = [{ - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - events: true - }]; - const instance = browserIMF(integrations, fakeParams as any); - - expect((SplitToGaMock as unknown as jest.Mock).mock.calls).toEqual([[fakeParams.settings.log, integrations[0]]]); // Invokes SplitToGa integration module with options - - const fakeImpression = 'fake'; // @ts-expect-error - instance.handleImpression(fakeImpression); - expect(SplitToGaQueueMethod.mock.calls).toEqual([[{ payload: fakeImpression, type: SPLIT_IMPRESSION }]]); // Invokes SplitToGa.queue method with tracked impression - - clearMocks(); - - const fakeEvent = 'fake'; // @ts-expect-error - instance.handleEvent(fakeEvent); - expect(SplitToGaQueueMethod.mock.calls).toEqual([[{ payload: fakeEvent, type: SPLIT_EVENT }]]); // Invokes SplitToGa.queue method with tracked event - - clearMocks(); - }); -}); diff --git a/src/integrations/browser.ts b/src/integrations/browser.ts deleted file mode 100644 index d4ad8de8..00000000 --- a/src/integrations/browser.ts +++ /dev/null @@ -1,35 
+0,0 @@ -import { GOOGLE_ANALYTICS_TO_SPLIT, SPLIT_TO_GOOGLE_ANALYTICS } from '../utils/constants/browser'; -import { IIntegration, IIntegrationManager, IIntegrationFactoryParams } from './types'; -import { BrowserIntegration } from './ga/types'; -import { pluggableIntegrationsManagerFactory } from './pluggable'; -import { GoogleAnalyticsToSplit } from './ga/GoogleAnalyticsToSplit'; -import { SplitToGoogleAnalytics } from './ga/SplitToGoogleAnalytics'; - -/** - * IntegrationsManager factory for the browser variant of the isomorphic JS SDK. - * The integrations manager instantiates integration modules, and bypass tracked events and impressions to them. - * - * @param integrations valid integration settings object for browser sdk - * @param params information of the Sdk factory instance that integrations can access to - * - * @returns integration manager or undefined if `integrations` are not present in settings. - */ -export function integrationsManagerFactory( - integrations: BrowserIntegration[], - params: IIntegrationFactoryParams -): IIntegrationManager | undefined { - - // maps integration config items into integration factories to reuse the pluggable integration manager - const integrationFactories: Array<(params: IIntegrationFactoryParams) => IIntegration | void> = integrations - .map(integrationOptions => { - switch (integrationOptions.type) { - case GOOGLE_ANALYTICS_TO_SPLIT: return GoogleAnalyticsToSplit(integrationOptions); - case SPLIT_TO_GOOGLE_ANALYTICS: return SplitToGoogleAnalytics(integrationOptions); - } - }) - .filter(integrationFactory => { - return integrationFactory && typeof integrationFactory === 'function'; - }); - - return pluggableIntegrationsManagerFactory(integrationFactories, params); -} diff --git a/src/integrations/ga/GaToSplit.ts b/src/integrations/ga/GaToSplit.ts deleted file mode 100644 index a996625f..00000000 --- a/src/integrations/ga/GaToSplit.ts +++ /dev/null @@ -1,299 +0,0 @@ -import { objectAssign } from 
'../../utils/lang/objectAssign'; -import { isString, isFiniteNumber, uniqAsStrings } from '../../utils/lang'; -import { - validateEvent, - validateEventValue, - validateEventProperties, - validateKey, - validateTrafficType -} from '../../utils/inputValidation'; -import { SplitIO } from '../../types'; -import { Identity, GoogleAnalyticsToSplitOptions } from './types'; -import { ILogger } from '../../logger/types'; -import { IIntegrationFactoryParams } from '../types'; -import { ITelemetryTracker } from '../../trackers/types'; - -const logPrefix = 'ga-to-split: '; -const logNameMapper = 'ga-to-split:mapper'; - -/** - * Provides a plugin to use with analytics.js, accounting for the possibility - * that the global command queue has been renamed or not yet defined. - * @param window Reference to global object. - * @param pluginName The plugin name identifier. - * @param pluginConstructor The plugin constructor function. - * @param log Logger instance. - * @param autoRequire If true, log error when auto-require script is not detected - */ -function providePlugin(window: any, pluginName: string, pluginConstructor: Function, log: ILogger, autoRequire: boolean, telemetryTracker?: ITelemetryTracker) { - // get reference to global command queue. Init it if not defined yet. - const gaAlias = window.GoogleAnalyticsObject || 'ga'; - window[gaAlias] = window[gaAlias] || function () { - (window[gaAlias].q = window[gaAlias].q || []).push(arguments); - }; - - // provides the plugin for use with analytics.js. - window[gaAlias]('provide', pluginName, pluginConstructor); - - const hasAutoRequire = window[gaAlias].q && window[gaAlias].q.push !== [].push; - if (autoRequire && !hasAutoRequire) { // Expecting spy on ga.q push method but not found - log.error(logPrefix + 'integration is configured to autorequire the splitTracker plugin, but the necessary script does not seem to have run. 
Please check the docs.'); - } - if (telemetryTracker && hasAutoRequire) { - telemetryTracker.addTag('integration:ga-autorequire'); - } -} - -// Default mapping: object used for building the default mapper from hits to Split events -const defaultMapping = { - eventTypeId: { - event: 'eventAction', - social: 'socialAction', - }, - eventValue: { - event: 'eventValue', - timing: 'timingValue', - }, - eventProperties: { - pageview: ['page'], - screenview: ['screenName'], - event: ['eventCategory', 'eventLabel'], - social: ['socialNetwork', 'socialTarget'], - timing: ['timingCategory', 'timingVar', 'timingLabel'], - exception: ['exDescription', 'exFatal'], - } -}; - -/** - * Build a mapper function based on a mapping object - * - * @param {object} mapping - */ -function mapperBuilder(mapping: typeof defaultMapping) { - return function (model: UniversalAnalytics.Model): SplitIO.EventData { - const hitType: string = model.get('hitType'); - // @ts-expect-error - const eventTypeId = model.get(mapping.eventTypeId[hitType] || 'hitType'); - // @ts-expect-error - const value = model.get(mapping.eventValue[hitType]); - - const properties: Record = {}; // @ts-expect-error - const fields: string[] = mapping.eventProperties[hitType]; - if (fields) { - for (let i = 0; i < fields.length; i++) { - const fieldValue = model.get(fields[i]); - if (fieldValue !== undefined) properties[fields[i]] = fieldValue; - } - } - - return { - eventTypeId, - value, - properties, - timestamp: Date.now(), - }; - }; -} - -// exposed for unit testing purposses. -export const defaultMapper = mapperBuilder(defaultMapping); - -export const defaultPrefix = 'ga'; - -/** - * Return a new list of identities removing invalid and duplicated ones. - * - * @param {Array} identities list of identities - * @returns list of valid and unique identities. The list might be empty if `identities` is not an array or all its elements are invalid. 
- */ -export function validateIdentities(identities?: Identity[]) { - if (!Array.isArray(identities)) - return []; - - // Remove duplicated identities - const uniqueIdentities = uniqAsStrings(identities); - - // Filter based on rum-agent identities validator - return uniqueIdentities.filter(identity => { - if (!identity) return false; - - const maybeKey = identity.key; - const maybeTT = identity.trafficType; - - if (!isString(maybeKey) && !isFiniteNumber(maybeKey)) - return false; - if (!isString(maybeTT)) - return false; - - return true; - }); -} - -/** - * Checks if EventData fields (except EventTypeId) are valid, and logs corresponding warnings. - * EventTypeId is validated separately. - * - * @param {EventData} data event data instance to validate. Precondition: data != undefined - * @returns {boolean} Whether the data instance is a valid EventData or not. - */ -export function validateEventData(log: ILogger, eventData: any): eventData is SplitIO.EventData { - if (!validateEvent(log, eventData.eventTypeId, logNameMapper)) - return false; - - if (validateEventValue(log, eventData.value, logNameMapper) === false) - return false; - - const { properties } = validateEventProperties(log, eventData.properties, logNameMapper); - if (properties === false) - return false; - - if (eventData.timestamp && !isFiniteNumber(eventData.timestamp)) - return false; - - if (eventData.key && validateKey(log, eventData.key, logNameMapper) === false) - return false; - - if (eventData.trafficTypeName && validateTrafficType(log, eventData.trafficTypeName, logNameMapper) === false) - return false; - - return true; -} - -const INVALID_PREFIX_REGEX = /^[^a-zA-Z0-9]+/; -const INVALID_SUBSTRING_REGEX = /[^-_.:a-zA-Z0-9]+/g; -/** - * Fixes the passed string value to comply with EventTypeId format, by removing invalid characters and truncating if necessary. - * - * @param {object} log factory logger - * @param {string} eventTypeId string value to fix. 
- * @returns {string} Fixed version of `eventTypeId`. - */ -export function fixEventTypeId(log: ILogger, eventTypeId: any) { - // return the input eventTypeId if it cannot be fixed - if (!isString(eventTypeId) || eventTypeId.length === 0) { - return eventTypeId; - } - - // replace invalid substrings and truncate - const fixed = eventTypeId - .replace(INVALID_PREFIX_REGEX, '') - .replace(INVALID_SUBSTRING_REGEX, '_'); - const truncated = fixed.slice(0, 80); - if (truncated.length < fixed.length) log.warn(logPrefix + 'EventTypeId was truncated because it cannot be more than 80 characters long.'); - return truncated; -} - -/** - * GaToSplit integration. - * This function provides the SplitTracker plugin to ga command queue. - * - * @param {object} sdkOptions options passed at the SDK integrations settings (isomorphic SDK) or the GoogleAnalyticsToSplit plugin (pluggable browser SDK) - * @param {object} storage SDK storage passed to track events - * @param {object} coreSettings core settings used to define an identity if no one provided as SDK or plugin options - * @param {object} log factory logger - */ -export function GaToSplit(sdkOptions: GoogleAnalyticsToSplitOptions, params: IIntegrationFactoryParams) { - - const { storage, settings: { core: coreSettings, log }, telemetryTracker } = params; - - const defaultOptions = { - prefix: defaultPrefix, - // We set default identities if key and TT are present in settings.core - identities: (coreSettings.key && coreSettings.trafficType) ? - [{ key: coreSettings.key, trafficType: coreSettings.trafficType }] : - undefined - }; - - class SplitTracker { - - private tracker: UniversalAnalytics.Tracker; - - // Constructor for the SplitTracker plugin. 
- constructor(tracker: UniversalAnalytics.Tracker, pluginOptions: GoogleAnalyticsToSplitOptions) { - - // precedence of options: SDK options (config.integrations) overwrite pluginOptions (`ga('require', 'splitTracker', pluginOptions)`) - const opts = objectAssign({}, defaultOptions, sdkOptions, pluginOptions) as GoogleAnalyticsToSplitOptions & { identities: Identity[] }; - - this.tracker = tracker; - - // Validate identities - const validIdentities = validateIdentities(opts.identities); - - if (validIdentities.length === 0) { - log.warn(logPrefix + 'No valid identities were provided. Please check that you are passing a valid list of identities or providing a traffic type at the SDK configuration.'); - return; - } - - const invalids = validIdentities.length - opts.identities.length; - if (invalids) { - log.warn(logPrefix + `${invalids} identities were discarded because they are invalid or duplicated. Identities must be an array of objects with key and trafficType.`); - } - opts.identities = validIdentities; - - // Validate prefix - if (!isString(opts.prefix)) { - log.warn(logPrefix + 'The provided `prefix` was ignored since it is invalid. 
Please check that you are passing a string object as `prefix`.'); - opts.prefix = undefined; - } - - // Overwrite sendHitTask to perform plugin tasks: - // 1) filter hits - // 2) map hits to Split events - // 3) handle events, i.e., validate and send them to Split BE - const originalSendHitTask = tracker.get('sendHitTask'); - tracker.set('sendHitTask', function (model: UniversalAnalytics.Model) { - originalSendHitTask(model); - - // filter hit if `hits` flag is false or if it comes from Split-to-GA integration - if (opts.hits === false || model.get('splitHit')) return; - try { - if (opts.filter && !opts.filter(model)) return; - } catch (err) { - log.warn(logPrefix + `custom filter threw: ${err}`); - return; - } - - // map hit into an EventData instance - let eventData: SplitIO.EventData = defaultMapper(model); - if (opts.mapper) { - try { - eventData = opts.mapper(model, eventData as SplitIO.EventData); - } catch (err) { - log.warn(logPrefix + `custom mapper threw: ${err}`); - return; - } - if (!eventData) - return; - } - - // Add prefix. Nothing is appended if the prefix is falsy, e.g. undefined or ''. - if (opts.prefix) eventData.eventTypeId = `${opts.prefix}.${eventData.eventTypeId}`; - - eventData.eventTypeId = fixEventTypeId(log, eventData.eventTypeId); - - if (!validateEventData(log, eventData)) - return; - - // Store the event - if (eventData.key && eventData.trafficTypeName) { - storage.events.track(eventData); - } else { // Store the event for each Key-TT pair (identities), if key and TT is not present in eventData - opts.identities.forEach(identity => { - const event = objectAssign({ - key: identity.key, - trafficTypeName: identity.trafficType, - }, eventData); - storage.events.track(event); - }); - } - }); - - log.info(logPrefix + 'integration started'); - } - - } - - // Register the plugin, even if config is invalid, since, if not provided, it will block `ga` command queue. 
- // eslint-disable-next-line no-undef - providePlugin(window, 'splitTracker', SplitTracker, log, sdkOptions.autoRequire === true, telemetryTracker); -} diff --git a/src/integrations/ga/GoogleAnalyticsToSplit.ts b/src/integrations/ga/GoogleAnalyticsToSplit.ts deleted file mode 100644 index b6463bb2..00000000 --- a/src/integrations/ga/GoogleAnalyticsToSplit.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { IIntegrationFactoryParams, IntegrationFactory } from '../types'; -import { GaToSplit } from './GaToSplit'; -import { GoogleAnalyticsToSplitOptions } from './types'; - -export function GoogleAnalyticsToSplit(options: GoogleAnalyticsToSplitOptions = {}): IntegrationFactory { - - // GaToSplit integration factory - function GoogleAnalyticsToSplitFactory(params: IIntegrationFactoryParams) { - return GaToSplit(options, params); - } - - GoogleAnalyticsToSplitFactory.type = 'GOOGLE_ANALYTICS_TO_SPLIT'; - return GoogleAnalyticsToSplitFactory; -} diff --git a/src/integrations/ga/SplitToGa.ts b/src/integrations/ga/SplitToGa.ts deleted file mode 100644 index dd469676..00000000 --- a/src/integrations/ga/SplitToGa.ts +++ /dev/null @@ -1,135 +0,0 @@ -/* eslint-disable no-undef */ -import { uniq } from '../../utils/lang'; -import { SPLIT_IMPRESSION, SPLIT_EVENT } from '../../utils/constants'; -import { SplitIO } from '../../types'; -import { IIntegration } from '../types'; -import { SplitToGoogleAnalyticsOptions } from './types'; -import { ILogger } from '../../logger/types'; - -const logPrefix = 'split-to-ga: '; -const noGaWarning = '`ga` command queue not found.'; -const noHit = 'No hit was sent.'; - -export class SplitToGa implements IIntegration { - - // A falsy object represents the default tracker - static defaultTrackerNames = ['']; - - private trackerNames: string[]; - private filter?: (data: SplitIO.IntegrationData) => boolean; - private mapper?: (data: SplitIO.IntegrationData, defaultMapping: UniversalAnalytics.FieldsObject) => UniversalAnalytics.FieldsObject; - private 
impressions: boolean | undefined; - private events: boolean | undefined; - private log: ILogger; - - // Default mapper function. - static defaultMapper({ type, payload }: SplitIO.IntegrationData): UniversalAnalytics.FieldsObject { - switch (type) { - case SPLIT_IMPRESSION: - return { - hitType: 'event', - eventCategory: 'split-impression', - eventAction: 'Evaluate ' + (payload as SplitIO.ImpressionData).impression.feature, - eventLabel: 'Treatment: ' + (payload as SplitIO.ImpressionData).impression.treatment + '. Targeting rule: ' + (payload as SplitIO.ImpressionData).impression.label + '.', - nonInteraction: true, - }; - case SPLIT_EVENT: - return { - hitType: 'event', - eventCategory: 'split-event', - eventAction: (payload as SplitIO.EventData).eventTypeId, - eventValue: (payload as SplitIO.EventData).value, - nonInteraction: true, - }; - } - } - - // Util to access ga command queue, accounting for the possibility that it has been renamed. - static getGa(): UniversalAnalytics.ga | undefined { // @ts-expect-error - return typeof window !== 'undefined' ? window[window['GoogleAnalyticsObject'] || 'ga'] : undefined; - } - - /** - * Validates if a given object is a UniversalAnalytics.FieldsObject instance, and logs a warning if not. - * It checks that the object contains a `hitType`, since it is the minimal field required to send the hit - * and avoid the GA error `No hit type specified. Aborting hit.`. - * Other validations (e.g., an `event` hitType must have a `eventCategory` and `eventAction`) are handled - * and logged (as warnings or errors depending the case) by GA debugger, but the hit is sent anyway. - * - * @param {object} log factory logger - * @param {UniversalAnalytics.FieldsObject} fieldsObject object to validate. - * @returns {boolean} Whether the data instance is a valid FieldsObject or not. 
- */ - static validateFieldsObject(log: ILogger, fieldsObject: any): fieldsObject is UniversalAnalytics.FieldsObject { - if (fieldsObject && fieldsObject.hitType) return true; - - log.warn(logPrefix + 'your custom mapper returned an invalid FieldsObject instance. It must be an object with at least a `hitType` field.'); - return false; - } - - /** - * constructor description - * @param {object} options options passed at the SDK integrations settings (isomorphic SDK) or the SplitToGoogleAnalytics plugin (pluggable browser SDK) - */ - constructor(log: ILogger, options: SplitToGoogleAnalyticsOptions) { - - this.trackerNames = SplitToGa.defaultTrackerNames; - this.log = log; - - if (options) { - if (typeof options.filter === 'function') this.filter = options.filter; - if (typeof options.mapper === 'function') this.mapper = options.mapper; - // We strip off duplicated values if we received a `trackerNames` param. - // We don't warn if a tracker does not exist, since the user might create it after the SDK is initialized. - // Note: GA allows to create and get trackers using a string or number as tracker name, and does nothing if other types are used. - if (Array.isArray(options.trackerNames)) this.trackerNames = uniq(options.trackerNames); - - // No need to validate `impressions` and `events` flags. Any other value than `false` is ignored (considered true by default). - this.impressions = options.impressions; - this.events = options.events; - } - - log.info(logPrefix + 'integration started'); - if (typeof SplitToGa.getGa() !== 'function') log.warn(logPrefix + `${noGaWarning} No hits will be sent until it is available.`); - } - - queue(data: SplitIO.IntegrationData) { - // access ga command queue via `getGa` method, accounting for the possibility that - // the global `ga` reference was not yet mutated by analytics.js. 
- const ga = SplitToGa.getGa(); - if (ga) { - - if (this.impressions === false && data.type === SPLIT_IMPRESSION) return; - if (this.events === false && data.type === SPLIT_EVENT) return; - - let fieldsObject: UniversalAnalytics.FieldsObject & { splitHit?: boolean }; - try { // only try/catch filter and mapper, which might be defined by the user - // filter - if (this.filter && !this.filter(data)) return; - - // map data into a FieldsObject instance - fieldsObject = SplitToGa.defaultMapper(data); - if (this.mapper) { - fieldsObject = this.mapper(data, fieldsObject); - // don't send the hit if it is falsy or invalid - if (!fieldsObject || !SplitToGa.validateFieldsObject(this.log, fieldsObject)) return; - } - } catch (err) { - this.log.warn(logPrefix + `queue method threw: ${err}. ${noHit}`); - return; - } - - // send the hit - this.trackerNames.forEach(trackerName => { - const sendCommand = trackerName ? `${trackerName}.send` : 'send'; - // mark the hit as a Split one to avoid the loop. - fieldsObject.splitHit = true; - // Send to GA using our reference to the GA object. 
- ga(sendCommand, fieldsObject); - }); - } else { - this.log.warn(logPrefix + `${noGaWarning} ${noHit}`); - } - } - -} diff --git a/src/integrations/ga/SplitToGoogleAnalytics.ts b/src/integrations/ga/SplitToGoogleAnalytics.ts deleted file mode 100644 index 101df26f..00000000 --- a/src/integrations/ga/SplitToGoogleAnalytics.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { IIntegrationFactoryParams, IntegrationFactory } from '../types'; -import { SplitToGa } from './SplitToGa'; -import { SplitToGoogleAnalyticsOptions } from './types'; - -export function SplitToGoogleAnalytics(options: SplitToGoogleAnalyticsOptions = {}): IntegrationFactory { - - // SplitToGa integration factory - function SplitToGoogleAnalyticsFactory(params: IIntegrationFactoryParams) { - return new SplitToGa(params.settings.log, options); - } - - SplitToGoogleAnalyticsFactory.type = 'SPLIT_TO_GOOGLE_ANALYTICS'; - return SplitToGoogleAnalyticsFactory; -} diff --git a/src/integrations/ga/__tests__/GaToSplit.spec.ts b/src/integrations/ga/__tests__/GaToSplit.spec.ts deleted file mode 100644 index 1417c6a1..00000000 --- a/src/integrations/ga/__tests__/GaToSplit.spec.ts +++ /dev/null @@ -1,295 +0,0 @@ -/* eslint-disable no-undef */ -import { IEventsCacheSync } from '../../../storages/types'; -import { SplitIO, ISettings } from '../../../types'; -import { GaToSplit, validateIdentities, defaultPrefix, defaultMapper, validateEventData, fixEventTypeId } from '../GaToSplit'; -import { gaMock, gaRemove, modelMock } from './gaMock'; -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; - -const hitSample: UniversalAnalytics.FieldsObject = { - hitType: 'pageview', - page: '/path', -}; -const modelSample = modelMock(hitSample); -const expectedDefaultEvent = { - eventTypeId: 'pageview', - value: undefined, - properties: { page: hitSample.page }, - timestamp: 0, -}; - -test('validateIdentities', () => { - expect(validateIdentities(undefined)).toEqual([]); // @ts-expect-error - 
expect(validateIdentities(null)).toEqual([]); // @ts-expect-error - expect(validateIdentities(123)).toEqual([]); // @ts-expect-error - expect(validateIdentities(true)).toEqual([]); // @ts-expect-error - expect(validateIdentities('something')).toEqual([]); // @ts-expect-error - expect(validateIdentities({})).toEqual([]); // @ts-expect-error - expect(validateIdentities(/asd/ig)).toEqual([]); // @ts-expect-error - expect(validateIdentities(function () { })).toEqual([]); - - expect(validateIdentities([])).toEqual([]); // @ts-expect-error - expect(validateIdentities([undefined, /asd/ig, function () { }])).toEqual([]); - expect(validateIdentities([{ - key: 'key', trafficType: 'user' // First occurence of this item - }, { // @ts-expect-error - key: 'key', trafficType: function () { } // Invalid item (invalid TT) - }, { - key: 'keyu', trafficType: 'ser' // First occurence of this item - }, { // @ts-expect-error - key: true, trafficType: 'user' // Invalid item (invalid key) - }, { - key: 'key2', trafficType: 'user2' // First occurence of this item - }, { // @ts-expect-error - key: 12, trafficType: 'user' // First occurence of this item - }, { - key: 'key', trafficType: 'user' // Duplicated item - }, // @ts-expect-error - {} // Invalid item (undefined key and traffic type) - ])).toEqual([{ - key: 'key', trafficType: 'user' - }, { - key: 'keyu', trafficType: 'ser' - }, { - key: 'key2', trafficType: 'user2' - }, { - key: 12, trafficType: 'user' - }]); -}); - -test('validateEventData', () => { - expect(() => { validateEventData(loggerMock, undefined); }).toThrow(); // throws exception if passed object is undefined - expect(() => { validateEventData(loggerMock, null); }).toThrow(); // throws exception if passed object is null - - expect(validateEventData(loggerMock, {})).toBe(false); // event must have a valid eventTypeId - expect(validateEventData(loggerMock, { eventTypeId: 'type' })).toBe(true); // event must have a valid eventTypeId - expect(validateEventData(loggerMock, { 
eventTypeId: 123 })).toBe(false); // event must have a valid eventTypeId - - expect(validateEventData(loggerMock, { eventTypeId: 'type', value: 'value' })).toBe(false); // event must have a valid value if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', value: 0 })).toBe(true); // event must have a valid value if present - - expect(validateEventData(loggerMock, { eventTypeId: 'type', properties: ['prop1'] })).toBe(false); // event must have valid properties if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', properties: { prop1: 'prop1' } })).toBe(true); // event must have valid properties if present - - expect(validateEventData(loggerMock, { eventTypeId: 'type', timestamp: true })).toBe(false); // event must have a valid timestamp if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', timestamp: Date.now() })).toBe(true); // event must have a valid timestamp if present - - expect(validateEventData(loggerMock, { eventTypeId: 'type', key: true })).toBe(false); // event must have a valid key if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', key: 'key' })).toBe(true); // event must have a valid key if present - - expect(validateEventData(loggerMock, { eventTypeId: 'type', trafficTypeName: true })).toBe(false); // event must have a valid trafficTypeName if present - expect(validateEventData(loggerMock, { eventTypeId: 'type', trafficTypeName: 'tt' })).toBe(true); // event must have a valid trafficTypeName if present -}); - -test('fixEventTypeId', () => { - expect(fixEventTypeId(loggerMock, undefined)).toBe(undefined); - expect(fixEventTypeId(loggerMock, 111)).toBe(111); - expect(fixEventTypeId(loggerMock, '')).toBe(''); - expect(fixEventTypeId(loggerMock, '()')).toBe(''); - expect(fixEventTypeId(loggerMock, '()+_')).toBe(''); - expect(fixEventTypeId(loggerMock, ' some event ')).toBe('some_event_'); - expect(fixEventTypeId(loggerMock, ' -*- some -.%^ event =+ ')).toBe('some_-._event_'); 
-}); - -test('defaultMapper', () => { - const initTimestamp = Date.now(); - const defaultEvent = defaultMapper(modelSample); - - expect(defaultEvent.eventTypeId).toBe(expectedDefaultEvent.eventTypeId); // should return the corresponding default event instance for a given pageview hit - expect(defaultEvent.value).toBe(expectedDefaultEvent.value); - expect(defaultEvent.properties).toEqual(expectedDefaultEvent.properties); - expect(initTimestamp <= defaultEvent.timestamp && defaultEvent.timestamp <= Date.now()).toBe(true); -}); - -const coreSettings = { - authorizationKey: 'sdkkey', - key: 'key', - trafficType: 'user', -} as ISettings['core']; -const fakeStorage = { - // @ts-expect-error - events: { - track: jest.fn() - } as IEventsCacheSync -}; -const fakeParams = { - storage: fakeStorage, - settings: { core: coreSettings, log: loggerMock } -}; - -// Returns a new event by copying defaultEvent -function customMapper(model: UniversalAnalytics.Model, defaultEvent: SplitIO.EventData) { - return { ...defaultEvent, properties: { ...defaultEvent.properties, someProp: 'someProp' } }; -} -// Updates defaultEvent -function customMapper2(model: UniversalAnalytics.Model, defaultEvent: SplitIO.EventData) { - // @ts-ignore. The defaultEvent has a property value, that might be empty depending on the hitType - defaultEvent.properties['someProp2'] = 'someProp2'; - return defaultEvent; -} -// Updates defaultEvent adding a `key` and `TT`, to assert that `identities` plugin param is ignored. 
-function customMapper3(model: UniversalAnalytics.Model, defaultEvent: SplitIO.EventData) { - defaultEvent.key = 'someKey'; - defaultEvent.trafficTypeName = 'someTT'; - return defaultEvent; -} -function customFilter() { - return true; -} -const customIdentities = [{ key: 'key2', trafficType: 'tt2' }]; - -test('GaToSplit', () => { - - // test setup - const { ga, tracker } = gaMock(); - - // provide SplitTracker plugin - GaToSplit({}, fakeParams as any); - // @ts-expect-error - let [arg1, arg2, SplitTracker] = ga.mock.calls.pop() as [string, string, any]; - expect([arg1, arg2]).toEqual(['provide', 'splitTracker']); - expect(typeof SplitTracker === 'function').toBe(true); - - /** Default behavior */ - - // init plugin on default tracker. equivalent to calling `ga('require', 'splitTracker')` - new SplitTracker(tracker); - - // send hit and assert that it was properly tracked as a Split event - window.ga('send', hitSample); - let event = (fakeStorage.events.track as jest.Mock).mock.calls.pop()[0]; - expect(event).toEqual( - { - ...expectedDefaultEvent, - eventTypeId: defaultPrefix + '.' 
+ expectedDefaultEvent.eventTypeId, - key: coreSettings.key, - trafficTypeName: coreSettings.trafficType, - timestamp: event.timestamp, - }); // should track an event using the default mapper and key and traffic type from the SDK config - - /** Custom behavior: plugin options */ - - // init plugin with custom options - new SplitTracker(tracker, { mapper: customMapper, filter: customFilter, identities: customIdentities, prefix: '' }); - - // send hit and assert that it was properly tracked as a Split event - window.ga('send', hitSample); - event = (fakeStorage.events.track as jest.Mock).mock.calls.pop()[0]; - expect(event).toEqual( - { - ...customMapper(modelSample, defaultMapper(modelSample)), - key: customIdentities[0].key, - trafficTypeName: customIdentities[0].trafficType, - timestamp: event.timestamp, - }); // should track an event using a custom mapper and identity from the plugin options - - /** Custom behavior: SDK options */ - - // provide a new SplitTracker plugin with custom SDK options - GaToSplit({ - mapper: customMapper2, filter: customFilter, identities: customIdentities, prefix: '' - }, fakeParams as any); - // @ts-expect-error - [arg1, arg2, SplitTracker] = ga.mock.calls.pop(); - expect([arg1, arg2]).toEqual(['provide', 'splitTracker']); - expect(typeof SplitTracker === 'function').toBe(true); - - // init plugin - new SplitTracker(tracker); - - // send hit and assert that it was properly tracked as a Split event - window.ga('send', hitSample); - event = (fakeStorage.events.track as jest.Mock).mock.calls.pop()[0]; - expect(event).toEqual( - { - ...customMapper2(modelSample, defaultMapper(modelSample)), - key: customIdentities[0].key, - trafficTypeName: customIdentities[0].trafficType, - timestamp: event.timestamp, - }); // should track the event using a custom mapper and identity from the SDK options - - /** Custom behavior: SDK options, including a customMapper that set events key and traffic type */ - - // provide a new SplitTracker plugin with 
custom SDK options - GaToSplit({ - mapper: customMapper3, filter: customFilter, identities: customIdentities, prefix: '' - }, fakeParams as any); - // @ts-ignore - [arg1, arg2, SplitTracker] = ga.mock.calls.pop(); - expect([arg1, arg2]).toEqual(['provide', 'splitTracker']); - expect(typeof SplitTracker === 'function').toBe(true); - - // init plugin - new SplitTracker(tracker); - - // send hit and assert that it was properly tracked as a Split event - window.ga('send', hitSample); - event = (fakeStorage.events.track as jest.Mock).mock.calls.pop()[0]; - expect(event).toEqual( - { - ...customMapper3(modelSample, defaultMapper(modelSample)), - timestamp: event.timestamp, - }); // should track the event using a custom mapper and identity from the SDK options - - // test teardown - gaRemove(); -}); - -test('GaToSplit: `hits` flag param', () => { - - // test setup - const { ga, tracker } = gaMock(); - GaToSplit({}, fakeParams as any); // @ts-expect-error - let SplitTracker: any = ga.mock.calls.pop()[2]; - - // init plugin with custom options - new SplitTracker(tracker, { hits: false }); - - // send hit and assert that it was not tracked as a Split event - (fakeStorage.events.track as jest.Mock).mockClear(); - window.ga('send', hitSample); - expect(fakeStorage.events.track).toBeCalledTimes(0); - - // test teardown - gaRemove(); -}); - -test('GaToSplit: `autoRequire` script and flag param', () => { - // test setup - gaMock(); - loggerMock.error.mockClear(); - - // Create commands before autoRequire script is executed - window.ga('create', 'UA-ID-X', 'auto', 'tX'); - - GaToSplit({ autoRequire: true }, fakeParams as any); - expect(loggerMock.error).toBeCalledTimes(1); - - window.ga('create', 'UA-ID-Y', 'auto', 'tY'); - - // Run autoRequire iife - require('../autoRequire.js'); - - GaToSplit({ autoRequire: true }, fakeParams as any); - expect(loggerMock.error).toBeCalledTimes(1); - - // Assert auto-require script - window.ga('create', 'UA-ID-0'); - window.ga('create', 
'UA-ID-1', 'auto', 't1'); - window.ga('create', 'UA-ID-2', { name: 't2' }); - window.ga('create', 'UA-ID-3', 'auto', { name: 't3' }); - window.ga('create', { trackingId: 'UA-ID-4', name: 't4' }); - - expect(window.ga.q.map(args => args[0])).toEqual([ - 'create' /* tX */, 'provide', - 'create' /* tY */, 'tX.require', - 'tY.require', 'provide', - 'create' /* default */, 'require', - 'create' /* t1 */, 't1.require', - 'create' /* t2 */, 't2.require', - 'create' /* t3 */, 't3.require', - 'create' /* t4 */, 't4.require', - ]); - - // test teardown - gaRemove(); -}); diff --git a/src/integrations/ga/__tests__/SplitToGa.spec.ts b/src/integrations/ga/__tests__/SplitToGa.spec.ts deleted file mode 100644 index d05e4bab..00000000 --- a/src/integrations/ga/__tests__/SplitToGa.spec.ts +++ /dev/null @@ -1,195 +0,0 @@ -import { SplitIO } from '../../../types'; -import { SPLIT_IMPRESSION, SPLIT_EVENT } from '../../../utils/constants'; - -// Mocks -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { gaMock, gaRemove } from './gaMock'; - -// Test target -import { SplitToGa } from '../SplitToGa'; - -const fakeImpressionPayload: SplitIO.ImpressionData = { - impression: { - feature: 'hierarchical_splits_test', - keyName: 'nicolas@split.io', - treatment: 'on', - bucketingKey: undefined, - label: 'expected label', - time: 2000, - changeNumber: 1000, - }, - attributes: undefined, - ip: 'ip', - hostname: 'hostname', - sdkLanguageVersion: 'version', -}; -const fakeImpression: SplitIO.IntegrationData = { - type: SPLIT_IMPRESSION, - payload: fakeImpressionPayload, -}; -const defaultImpressionFieldsObject: UniversalAnalytics.FieldsObject = { - hitType: 'event', - eventCategory: 'split-impression', - eventAction: 'Evaluate ' + fakeImpressionPayload.impression.feature, - eventLabel: 'Treatment: ' + fakeImpressionPayload.impression.treatment + '. 
Targeting rule: ' + fakeImpressionPayload.impression.label + '.', - nonInteraction: true -}; - -const fakeEventPayload: SplitIO.EventData = { - eventTypeId: 'eventTypeId', - trafficTypeName: 'trafficTypeName', - value: 0, - timestamp: Date.now(), - key: 'key', - properties: {}, -}; -const fakeEvent: SplitIO.IntegrationData = { - type: SPLIT_EVENT, - payload: fakeEventPayload, -}; -const defaultEventFieldsObject = { - hitType: 'event', - eventCategory: 'split-event', - eventAction: fakeEventPayload.eventTypeId, - eventValue: fakeEventPayload.value, - nonInteraction: true -}; - -describe('SplitToGa', () => { - - test('SplitToGa.validateFieldsObject', () => { - expect(SplitToGa.validateFieldsObject(loggerMock, undefined)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, null)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, 123)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, true)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, 'something')).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, /asd/ig)).toBe(false); - expect(SplitToGa.validateFieldsObject(loggerMock, function () { })).toBe(false); - - expect(SplitToGa.validateFieldsObject(loggerMock, {})).toBe(false); // An empty object is an invalid FieldsObject instance - expect(SplitToGa.validateFieldsObject(loggerMock, { hitType: 10 })).toBe(true); // A fields object instance must have a HitType - expect(SplitToGa.validateFieldsObject(loggerMock, { hitType: 'event', ignoredProp: 'ignoredProp' })).toBe(true); // A fields object instance must have a HitType - }); - - test('SplitToGa.defaultMapper', () => { - // should return the corresponding FieldsObject for a given impression - expect(SplitToGa.defaultMapper(fakeImpression)).toEqual(defaultImpressionFieldsObject); - // should return the corresponding FieldsObject for a given event - expect(SplitToGa.defaultMapper(fakeEvent)).toEqual(defaultEventFieldsObject); - }); - - 
test('SplitToGa.getGa', () => { - loggerMock.mockClear(); - - const { ga } = gaMock(); - expect(SplitToGa.getGa()).toBe(ga); // should return ga command queue if it exists - - let integration = new SplitToGa(loggerMock, {}); - expect(typeof integration).toBe('object'); - expect(loggerMock.warn).not.toBeCalled(); - - gaRemove(); - expect(SplitToGa.getGa()).toBe(undefined); // should return undefined if ga command queue does not exist - - integration = new SplitToGa(loggerMock, {}); - expect(typeof integration).toBe('object'); // SplitToGa instances should be created even if ga command queue does not exist - // @ts-expect-error - integration.queue('fake-data'); - expect(loggerMock.warn.mock.calls).toEqual([ // Warn when creating and queueing while ga command queue does not exist - ['split-to-ga: `ga` command queue not found. No hits will be sent until it is available.'], - ['split-to-ga: `ga` command queue not found. No hit was sent.'] - ]); - }); - - test('SplitToGa (constructor and queue method)', () => { - - // test setup - const { ga } = gaMock(); - - /** Default behaviour **/ - const instance = new SplitToGa(loggerMock, {}) as SplitToGa; - instance.queue(fakeImpression); - // should queue `ga send` with the default mapped FieldsObject for impressions, appended with `splitHit` field - expect(ga).lastCalledWith('send', { ...defaultImpressionFieldsObject, splitHit: true }); - - instance.queue(fakeEvent); - // should queue `ga send` with the default mapped FieldsObject for events, appended with `splitHit` field - expect(ga).lastCalledWith('send', { ...defaultEventFieldsObject, splitHit: true }); - - expect(ga).toBeCalledTimes(2); - - /** Custom behaviour **/ - // Custom filter - function customFilter(data: SplitIO.IntegrationData) { - return data.type === SPLIT_EVENT; - } - // Custom mapper that returns a new FieldsObject instance - function customMapper() { - return { - hitType: 'event', - someField: 'someField', - } as UniversalAnalytics.FieldsObject; - } - const 
trackerNames = ['', 'namedTracker']; - const instance2 = new SplitToGa(loggerMock, { - filter: customFilter, - mapper: customMapper, - trackerNames, - }) as SplitToGa; - ga.mockClear(); - instance2.queue(fakeImpression); - expect(ga).not.toBeCalled(); // shouldn't queue `ga send` if a Split data (impression or event) is filtered - - instance2.queue(fakeEvent); - expect(ga.mock.calls).toEqual([ - ['send', { ...customMapper(), splitHit: true }], - [`${trackerNames[1]}.send`, { ...customMapper(), splitHit: true }] - ]); // should queue `ga send` with the custom trackerName and FieldsObject from customMapper, appended with `splitHit` field - - expect(ga).toBeCalledTimes(2); - - // Custom mapper that returns the default FieldsObject - function customMapper2(data: SplitIO.IntegrationData, defaultFieldsObject: UniversalAnalytics.FieldsObject) { - return defaultFieldsObject; - } - const instance3 = new SplitToGa(loggerMock, { - mapper: customMapper2, - }) as SplitToGa; - ga.mockClear(); - instance3.queue(fakeImpression); - // should queue `ga send` with the custom FieldsObject from customMapper2, appended with `splitHit` field - expect(ga).lastCalledWith('send', { ...customMapper2(fakeImpression, defaultImpressionFieldsObject), splitHit: true }); - - expect(ga).toBeCalledTimes(1); - - // Custom mapper that throws an error - function customMapper3() { - throw 'some error'; - } - const instance4 = new SplitToGa(loggerMock, { // @ts-expect-error - mapper: customMapper3, - }) as SplitToGa; - ga.mockClear(); - instance4.queue(fakeImpression); - expect(ga).not.toBeCalled(); // shouldn't queue `ga send` if a custom mapper throw an exception - - // `impressions` flags - const instance5 = new SplitToGa(loggerMock, { - impressions: false, - }) as SplitToGa; - ga.mockClear(); - instance5.queue(fakeImpression); - expect(ga).not.toBeCalled(); // shouldn't queue `ga send` for an impression if `impressions` flag is false - - // `impressions` flags - const instance6 = new 
SplitToGa(loggerMock, { - events: false, - }) as SplitToGa; - ga.mockClear(); - instance6.queue(fakeEvent); - expect(ga).not.toBeCalled(); // shouldn't queue `ga send` for a event if `events` flag is false - - // test teardown - gaRemove(); - }); -}); diff --git a/src/integrations/ga/__tests__/gaMock.ts b/src/integrations/ga/__tests__/gaMock.ts deleted file mode 100644 index 2a72863d..00000000 --- a/src/integrations/ga/__tests__/gaMock.ts +++ /dev/null @@ -1,60 +0,0 @@ -export function modelMock(fieldsObject: UniversalAnalytics.FieldsObject) { - return { - get(fieldName: string) { - return fieldsObject[fieldName as keyof UniversalAnalytics.FieldsObject]; - }, - set(fieldNameOrObject: string | {}, fieldValue?: any) { - if (typeof fieldNameOrObject === 'object') - fieldsObject = { ...fieldsObject, ...fieldNameOrObject }; - else - fieldsObject[fieldNameOrObject as keyof UniversalAnalytics.FieldsObject] = fieldValue; - } - }; -} - -export function gaMock() { - - const __originalSendHitTask = jest.fn(); - const __tasks: Record = { - sendHitTask: __originalSendHitTask - }; - const ga = jest.fn(function (command) { // @ts-ignore - (ga.q = ga.q || []).push(arguments); - - if (command === 'send') { - const fieldsObject = arguments[1]; - __tasks.sendHitTask(modelMock(fieldsObject)); - } - }); - - const set = jest.fn(function (taskName, taskFunc) { - __tasks[taskName] = taskFunc; - }); - const get = jest.fn(function (taskName) { - return __tasks[taskName]; - }); - - // Add ga to window object - if (typeof window === 'undefined') { // @ts-expect-error - if (global) global.window = {}; - } // @ts-expect-error - // eslint-disable-next-line no-undef - window['GoogleAnalyticsObject'] = 'ga'; - // eslint-disable-next-line no-undef - window['ga'] = window['ga'] || ga; - - return { - ga, - tracker: { - get, - set, - __originalSendHitTask, - } - }; -} - -export function gaRemove() { - if (typeof window !== 'undefined') // @ts-expect-error - // eslint-disable-next-line no-undef - 
window[window['GoogleAnalyticsObject'] || 'ga'] = undefined; -} diff --git a/src/integrations/ga/autoRequire.js b/src/integrations/ga/autoRequire.js deleted file mode 100644 index a6adad72..00000000 --- a/src/integrations/ga/autoRequire.js +++ /dev/null @@ -1,33 +0,0 @@ -/* eslint-disable no-undef */ -/** - * Auto-require script to use with GoogleAnalyticsToSplit integration - */ -(function (w, g, o) { - w[o] = w[o] || g; - w[g] = w[g] || function () { w[g].q.push(arguments); }; - w[g].q = w[g].q || []; - - var trackerNames = {}; - function name(arg) { return typeof arg === 'object' && typeof arg.name === 'string' && arg.name; } - - function processCommand(command) { // Queue a `require` command if v is a `create` command - if (command && command[0] === 'create') { - var trackerName = name(command[1]) || name(command[2]) || name(command[3]) || (typeof command[3] === 'string' ? command[3] : undefined); // Get tracker name - - if (!trackerNames[trackerName]) { - trackerNames[trackerName] = true; - w[g]((trackerName ? trackerName + '.' : '') + 'require', 'splitTracker'); // Auto-require - } - } - } - - w[g].q.forEach(processCommand); // Process already queued commands - - var originalPush = w[g].q.push; - w[g].q.push = function (command) { // Spy new queued commands - var result = originalPush.apply(this, arguments); - processCommand(command); - return result; - }; - -})(window, 'ga', 'GoogleAnalyticsObject'); diff --git a/src/integrations/ga/types.ts b/src/integrations/ga/types.ts deleted file mode 100644 index dfa5f11e..00000000 --- a/src/integrations/ga/types.ts +++ /dev/null @@ -1,153 +0,0 @@ -import { SplitIO } from '../../types'; - -/** - * A pair of user key and it's trafficType, required for tracking valid Split events. - * @typedef {Object} Identity - * @property {string} key The user key. - * @property {string} trafficType The key traffic type. 
- */ -export type Identity = { - key: string; - trafficType: string; -}; - -/** - * Options for GoogleAnalyticsToSplit integration plugin - */ -export interface GoogleAnalyticsToSplitOptions { - /** - * Optional flag to filter GA hits from being tracked as Split events. - * @property {boolean} hits - * @default true - */ - hits?: boolean, - /** - * Optional predicate used to define a custom filter for tracking GA hits as Split events. - * For example, the following filter allows to track only 'event' hits: - * `(model) => model.get('hitType') === 'event'` - * By default, all hits are tracked as Split events. - */ - filter?: (model: UniversalAnalytics.Model) => boolean, - /** - * Optional function useful when you need to modify the Split event before tracking it. - * This function is invoked with two arguments: - * 1. the GA model object representing the hit. - * 2. the default format of the mapped Split event instance. - * The return value must be a Split event, that can be the second argument or a new object. - * - * For example, the following mapper adds a custom property to events: - * `(model, defaultMapping) => { - * defaultMapping.properties.someProperty = SOME_VALUE; - * return defaultMapping; - * }` - */ - mapper?: (model: UniversalAnalytics.Model, defaultMapping: SplitIO.EventData) => SplitIO.EventData, - /** - * Optional prefix for EventTypeId, to prevent any kind of data collision between events. - * @property {string} prefix - * @default 'ga' - */ - prefix?: string, - /** - * List of Split identities (key & traffic type pairs) used to track events. - * If not provided, events are sent using the key and traffic type provided at SDK config - */ - identities?: Identity[], - /** - * Optional flag to log an error if the `auto-require` script is not detected. - * The auto-require script automatically requires the `splitTracker` plugin for created trackers, - * and should be placed right after your Google Analytics, Google Tag Manager or gtag.js script tag. 
- * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#set-up-with-gtm-and-gtag.js} - * - * @property {boolean} autoRequire - * @default false - */ - autoRequire?: boolean, -} - -/** - * Enable 'Google Analytics to Split' integration, to track Google Analytics hits as Split events. - * Used by the browser variant of the isomorphic JS SDK. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#google-analytics-to-split} - */ -export interface IGoogleAnalyticsToSplitConfig extends GoogleAnalyticsToSplitOptions { - type: 'GOOGLE_ANALYTICS_TO_SPLIT' -} - -/** - * Options for SplitToGoogleAnalytics integration plugin - */ -export interface SplitToGoogleAnalyticsOptions { - /** - * Optional flag to filter Split impressions from being tracked as GA hits. - * @property {boolean} impressions - * @default true - */ - impressions?: boolean, - /** - * Optional flag to filter Split events from being tracked as GA hits. - * @property {boolean} events - * @default true - */ - events?: boolean, - /** - * Optional predicate used to define a custom filter for tracking Split data (events and impressions) as GA hits. - * For example, the following filter allows to track only impressions, equivalent to setting events to false: - * `(data) => data.type === 'IMPRESSION'` - */ - filter?: (data: SplitIO.IntegrationData) => boolean, - /** - * Optional function useful when you need to modify the GA hit before sending it. - * This function is invoked with two arguments: - * 1. the input data (Split event or impression). - * 2. the default format of the mapped FieldsObject instance (GA hit). - * The return value must be a FieldsObject, that can be the second argument or a new object. 
- * - * For example, the following mapper adds a custom dimension to hits: - * `(data, defaultMapping) => { - * defaultMapping.dimension1 = SOME_VALUE; - * return defaultMapping; - * }` - * - * Default FieldsObject instance for data.type === 'IMPRESSION': - * `{ - * hitType: 'event', - * eventCategory: 'split-impression', - * eventAction: 'Evaluate ' + data.payload.impression.feature, - * eventLabel: 'Treatment: ' + data.payload.impression.treatment + '. Targeting rule: ' + data.payload.impression.label + '.', - * nonInteraction: true, - * }` - * Default FieldsObject instance for data.type === 'EVENT': - * `{ - * hitType: 'event', - * eventCategory: 'split-event', - * eventAction: data.payload.eventTypeId, - * eventValue: data.payload.value, - * nonInteraction: true, - * }` - */ - mapper?: (data: SplitIO.IntegrationData, defaultMapping: UniversalAnalytics.FieldsObject) => UniversalAnalytics.FieldsObject, - /** - * List of tracker names to send the hit. An empty string represents the default tracker. - * If not provided, hits are only sent to default tracker. - */ - trackerNames?: string[], -} - -/** - * Enable 'Split to Google Analytics' integration, to track Split impressions and events as Google Analytics hits. - * Used by the browser variant of the isomorphic JS SDK. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#split-to-google-analytics} - */ -export interface ISplitToGoogleAnalyticsConfig extends SplitToGoogleAnalyticsOptions { - type: 'SPLIT_TO_GOOGLE_ANALYTICS' -} - -/** - * Available integration options for the browser - * Used by the browser variant of the isomorphic JS SDK. 
- */ -export type BrowserIntegration = ISplitToGoogleAnalyticsConfig | IGoogleAnalyticsToSplitConfig; diff --git a/src/integrations/pluggable.ts b/src/integrations/pluggable.ts index df4ccd21..b1b7a12f 100644 --- a/src/integrations/pluggable.ts +++ b/src/integrations/pluggable.ts @@ -1,13 +1,13 @@ import { SPLIT_IMPRESSION, SPLIT_EVENT } from '../utils/constants'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { IIntegration, IIntegrationManager, IIntegrationFactoryParams } from './types'; /** * Factory function for IntegrationsManager with pluggable integrations. * The integrations manager instantiates integration, and bypass tracked events and impressions to them. * - * @param integrations validated list of integration factories - * @param params information of the Sdk factory instance that integrations can access to + * @param integrations - validated list of integration factories + * @param params - information of the Sdk factory instance that integrations can access to * * @returns integration manager or undefined if `integrations` are not present in settings. 
*/ diff --git a/src/integrations/types.ts b/src/integrations/types.ts index d2ac4fb0..b0059f26 100644 --- a/src/integrations/types.ts +++ b/src/integrations/types.ts @@ -1,6 +1,7 @@ import { IEventsCacheBase } from '../storages/types'; import { IEventsHandler, IImpressionsHandler, ITelemetryTracker } from '../trackers/types'; -import { ISettings, SplitIO } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; export interface IIntegration { queue(data: SplitIO.IntegrationData): void @@ -14,7 +15,7 @@ export interface IIntegrationFactoryParams { telemetryTracker: ITelemetryTracker } -export type IntegrationFactory = { +export type IntegrationFactory = SplitIO.IntegrationFactory & { readonly type: string (params: IIntegrationFactoryParams): IIntegration | void } diff --git a/src/listeners/browser.ts b/src/listeners/browser.ts index 409241cc..12f13b74 100644 --- a/src/listeners/browser.ts +++ b/src/listeners/browser.ts @@ -5,7 +5,8 @@ import { IRecorderCacheSync, IStorageSync } from '../storages/types'; import { fromImpressionsCollector } from '../sync/submitters/impressionsSubmitter'; import { fromImpressionCountsCollector } from '../sync/submitters/impressionCountsSubmitter'; import { IResponse, ISplitApi } from '../services/types'; -import { ImpressionDTO, ISettings } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { ImpressionsPayload } from '../sync/submitters/types'; import { OPTIMIZED, DEBUG, NONE } from '../utils/constants'; import { objectAssign } from '../utils/lang/objectAssign'; @@ -22,7 +23,7 @@ const EVENT_NAME = 'for visibilitychange and pagehide events.'; */ export class BrowserSignalListener implements ISignalListener { - private fromImpressionsCollector: (data: ImpressionDTO[]) => ImpressionsPayload; + private fromImpressionsCollector: (data: SplitIO.ImpressionDTO[]) => ImpressionsPayload; constructor( private syncManager: ISyncManager | 
undefined, @@ -115,7 +116,6 @@ export class BrowserSignalListener implements ISignalListener { * Returns true if beacon API was used successfully, false otherwise. */ private _sendBeacon(url: string, data: any, extraMetadata?: {}) { - // eslint-disable-next-line compat/compat if (typeof navigator !== 'undefined' && navigator.sendBeacon) { const json = { entries: data, @@ -130,7 +130,7 @@ export class BrowserSignalListener implements ISignalListener { const payload = JSON.stringify(json); // https://xgwang.me/posts/you-may-not-know-beacon/#it-may-throw-error%2C-be-sure-to-catch - try { // eslint-disable-next-line compat/compat + try { return navigator.sendBeacon(url, payload); } catch (e) { return false; diff --git a/src/logger/__tests__/index.spec.ts b/src/logger/__tests__/index.spec.ts index 20db51e9..3b6e7dec 100644 --- a/src/logger/__tests__/index.spec.ts +++ b/src/logger/__tests__/index.spec.ts @@ -1,5 +1,4 @@ -import { LogLevel } from '../../types'; -import { _Map } from '../../utils/lang/maps'; +import SplitIO from '../../../types/splitio'; import { Logger, LogLevels, isLogLevelString, _sprintf } from '../index'; // We'll set this only once. These are the constants we will use for @@ -37,9 +36,9 @@ test('SPLIT LOGGER / Logger class shape', () => { expect(typeof logger.setLogLevel).toBe('function'); // instance.setLogLevel should be a method. }); -const LOG_LEVELS_IN_ORDER = ['DEBUG', 'INFO', 'WARN', 'ERROR', 'NONE']; +const LOG_LEVELS_IN_ORDER: SplitIO.LogLevel[] = ['DEBUG', 'INFO', 'WARN', 'ERROR', 'NONE']; /* Utility function to avoid repeating too much code */ -function testLogLevels(levelToTest: LogLevel) { +function testLogLevels(levelToTest: SplitIO.LogLevel) { // Builds the expected message. 
const buildExpectedMessage = (lvl: string, category: string, msg: string, showLevel?: boolean) => { let res = ''; @@ -59,7 +58,7 @@ function testLogLevels(levelToTest: LogLevel) { const logMethod = levelToTest.toLowerCase(); const logCategory = `test-category-${logMethod}`; const instance = new Logger({ prefix: logCategory, showLevel }, - useCodes ? new _Map([[1, 'Test log for level %s with showLevel: %s %s']]) : undefined); + useCodes ? new Map([[1, 'Test log for level %s with showLevel: %s %s']]) : undefined); LOG_LEVELS_IN_ORDER.forEach((logLevel, i) => { const logMsg = `Test log for level ${levelToTest} with showLevel: ${showLevel} ${logLevelLogsCounter}`; diff --git a/src/logger/__tests__/sdkLogger.mock.ts b/src/logger/__tests__/sdkLogger.mock.ts index a2cc184f..b7c4aa25 100644 --- a/src/logger/__tests__/sdkLogger.mock.ts +++ b/src/logger/__tests__/sdkLogger.mock.ts @@ -1,4 +1,4 @@ -import { LogLevel } from '../../types'; +import SplitIO from '../../../types/splitio'; export const loggerMock = { warn: jest.fn(), @@ -16,6 +16,6 @@ export const loggerMock = { } }; -export function getLoggerLogLevel(logger: any): LogLevel | undefined { +export function getLoggerLogLevel(logger: any): SplitIO.LogLevel | undefined { if (logger) return logger.options.logLevel; } diff --git a/src/logger/browser/DebugLogger.ts b/src/logger/browser/DebugLogger.ts index 105e1890..354a497b 100644 --- a/src/logger/browser/DebugLogger.ts +++ b/src/logger/browser/DebugLogger.ts @@ -1,7 +1,6 @@ import { Logger } from '../index'; import { codesDebug } from '../messages/debug'; -import { _Map } from '../../utils/lang/maps'; export function DebugLogger() { - return new Logger({ logLevel: 'DEBUG' }, new _Map(codesDebug)); + return new Logger({ logLevel: 'DEBUG' }, new Map(codesDebug)); } diff --git a/src/logger/browser/ErrorLogger.ts b/src/logger/browser/ErrorLogger.ts index f0702d89..4a685237 100644 --- a/src/logger/browser/ErrorLogger.ts +++ b/src/logger/browser/ErrorLogger.ts @@ -1,7 +1,6 @@ 
import { Logger } from '../index'; import { codesError } from '../messages/error'; -import { _Map } from '../../utils/lang/maps'; export function ErrorLogger() { - return new Logger({ logLevel: 'ERROR' }, new _Map(codesError)); + return new Logger({ logLevel: 'ERROR' }, new Map(codesError)); } diff --git a/src/logger/browser/InfoLogger.ts b/src/logger/browser/InfoLogger.ts index bdf9be75..a57d1cf2 100644 --- a/src/logger/browser/InfoLogger.ts +++ b/src/logger/browser/InfoLogger.ts @@ -1,7 +1,6 @@ import { Logger } from '../index'; import { codesInfo } from '../messages/info'; -import { _Map } from '../../utils/lang/maps'; export function InfoLogger() { - return new Logger({ logLevel: 'INFO' }, new _Map(codesInfo)); + return new Logger({ logLevel: 'INFO' }, new Map(codesInfo)); } diff --git a/src/logger/browser/WarnLogger.ts b/src/logger/browser/WarnLogger.ts index 8456d012..ebeb59ab 100644 --- a/src/logger/browser/WarnLogger.ts +++ b/src/logger/browser/WarnLogger.ts @@ -1,7 +1,6 @@ import { Logger } from '../index'; import { codesWarn } from '../messages/warn'; -import { _Map } from '../../utils/lang/maps'; export function WarnLogger() { - return new Logger({ logLevel: 'WARN' }, new _Map(codesWarn)); + return new Logger({ logLevel: 'WARN' }, new Map(codesWarn)); } diff --git a/src/logger/constants.ts b/src/logger/constants.ts index 36f6a139..520a5707 100644 --- a/src/logger/constants.ts +++ b/src/logger/constants.ts @@ -79,7 +79,7 @@ export const WARN_SPLITS_FILTER_IGNORED = 219; export const WARN_SPLITS_FILTER_INVALID = 220; export const WARN_SPLITS_FILTER_EMPTY = 221; export const WARN_SDK_KEY = 222; -export const STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2 = 223; +export const STREAMING_PARSING_MEMBERSHIPS_UPDATE = 223; export const STREAMING_PARSING_SPLIT_UPDATE = 224; export const WARN_INVALID_FLAGSET = 225; export const WARN_LOWERCASE_FLAGSET = 226; @@ -109,7 +109,6 @@ export const ERROR_EMPTY = 319; export const ERROR_EMPTY_ARRAY = 320; export const 
ERROR_INVALID_CONFIG_PARAM = 321; export const ERROR_HTTP = 322; -export const ERROR_LOCALHOST_MODULE_REQUIRED = 323; export const ERROR_STORAGE_INVALID = 324; export const ERROR_NOT_BOOLEAN = 325; export const ERROR_MIN_CONFIG_PARAM = 326; diff --git a/src/logger/index.ts b/src/logger/index.ts index 136b2e58..662e1f86 100644 --- a/src/logger/index.ts +++ b/src/logger/index.ts @@ -1,10 +1,9 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { ILoggerOptions, ILogger } from './types'; import { find, isObject } from '../utils/lang'; -import { LogLevel } from '../types'; -import { IMap, _Map } from '../utils/lang/maps'; +import SplitIO from '../../types/splitio'; -export const LogLevels: { [level: string]: LogLevel } = { +export const LogLevels: SplitIO.ILoggerAPI['LogLevel'] = { DEBUG: 'DEBUG', INFO: 'INFO', WARN: 'WARN', @@ -20,7 +19,7 @@ const LogLevelIndexes = { NONE: 5 }; -export function isLogLevelString(str: string): str is LogLevel { +export function isLogLevelString(str: string): str is SplitIO.LogLevel { return !!find(LogLevels, (lvl: string) => str === lvl); } @@ -47,16 +46,16 @@ const defaultOptions = { export class Logger implements ILogger { private options: Required; - private codes: IMap; + private codes: Map; private logLevel: number; - constructor(options?: ILoggerOptions, codes?: IMap) { + constructor(options?: ILoggerOptions, codes?: Map) { this.options = objectAssign({}, defaultOptions, options); - this.codes = codes || new _Map(); + this.codes = codes || new Map(); this.logLevel = LogLevelIndexes[this.options.logLevel]; } - setLogLevel(logLevel: LogLevel) { + setLogLevel(logLevel: SplitIO.LogLevel) { this.options.logLevel = logLevel; this.logLevel = LogLevelIndexes[logLevel]; } @@ -77,7 +76,7 @@ export class Logger implements ILogger { if (this._shouldLog(LogLevelIndexes.ERROR)) this._log(LogLevels.ERROR, msg, args); } - private _log(level: LogLevel, msg: string | number, args?: any[]) { + private _log(level: SplitIO.LogLevel, 
msg: string | number, args?: any[]) { if (typeof msg === 'number') { const format = this.codes.get(msg); msg = format ? _sprintf(format, args) : `Message code ${msg}${args ? ', with args: ' + args.toString() : ''}`; @@ -90,7 +89,7 @@ export class Logger implements ILogger { console.log(formattedText); } - private _generateLogMessage(level: LogLevel, text: string) { + private _generateLogMessage(level: SplitIO.LogLevel, text: string) { const textPre = ' => '; let result = ''; diff --git a/src/logger/messages/error.ts b/src/logger/messages/error.ts index ca02daf0..2c0b0c63 100644 --- a/src/logger/messages/error.ts +++ b/src/logger/messages/error.ts @@ -32,7 +32,6 @@ export const codesError: [number, string][] = [ [c.ERROR_NOT_BOOLEAN, '%s: provided param must be a boolean value.'], // initialization / settings validation [c.ERROR_INVALID_CONFIG_PARAM, c.LOG_PREFIX_SETTINGS + ': you passed an invalid "%s" config param. It should be one of the following values: %s. Defaulting to "%s".'], - [c.ERROR_LOCALHOST_MODULE_REQUIRED, c.LOG_PREFIX_SETTINGS + ': an invalid value was received for "sync.localhostMode" config. A valid entity should be provided for localhost mode.'], [c.ERROR_STORAGE_INVALID, c.LOG_PREFIX_SETTINGS+': the provided storage is invalid.%s Falling back into default MEMORY storage'], [c.ERROR_MIN_CONFIG_PARAM, c.LOG_PREFIX_SETTINGS + ': the provided "%s" config param is lower than allowed. 
Setting to the minimum value %s seconds'], [c.ERROR_TOO_MANY_SETS, c.LOG_PREFIX_SETTINGS + ': the amount of flag sets provided are big causing uri length error.'], diff --git a/src/logger/messages/info.ts b/src/logger/messages/info.ts index 907c9fc7..94333bf6 100644 --- a/src/logger/messages/info.ts +++ b/src/logger/messages/info.ts @@ -23,7 +23,7 @@ export const codesInfo: [number, string][] = codesWarn.concat([ [c.POLLING_START, c.LOG_PREFIX_SYNC_POLLING + 'Starting polling'], [c.POLLING_STOP, c.LOG_PREFIX_SYNC_POLLING + 'Stopping polling'], [c.SYNC_SPLITS_FETCH_RETRY, c.LOG_PREFIX_SYNC_SPLITS + 'Retrying download of feature flags #%s. Reason: %s'], - [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and reseting timer.'], + [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and resetting timer.'], [c.SUBMITTERS_PUSH, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Pushing %s.'], [c.STREAMING_REFRESH_TOKEN, c.LOG_PREFIX_SYNC_STREAMING + 'Refreshing streaming token in %s seconds, and connecting streaming in %s seconds.'], [c.STREAMING_RECONNECT, c.LOG_PREFIX_SYNC_STREAMING + 'Attempting to reconnect streaming in %s seconds.'], diff --git a/src/logger/messages/warn.ts b/src/logger/messages/warn.ts index 6bad73e8..52487f95 100644 --- a/src/logger/messages/warn.ts +++ b/src/logger/messages/warn.ts @@ -32,7 +32,7 @@ export const codesWarn: [number, string][] = codesError.concat([ [c.WARN_SPLITS_FILTER_EMPTY, c.LOG_PREFIX_SETTINGS + ': feature flag filter configuration must be a non-empty array of filter objects.'], [c.WARN_SDK_KEY, c.LOG_PREFIX_SETTINGS + ': You already have %s. 
We recommend keeping only one instance of the factory at all times (Singleton pattern) and reusing it throughout your application'], - [c.STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching MySegments due to an error processing %s notification: %s'], + [c.STREAMING_PARSING_MEMBERSHIPS_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching Memberships due to an error processing %s notification: %s'], [c.STREAMING_PARSING_SPLIT_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching SplitChanges due to an error processing SPLIT_UPDATE notification: %s'], [c.WARN_INVALID_FLAGSET, '%s: you passed %s, flag set must adhere to the regular expressions %s. This means a flag set must start with a letter or number, be in lowercase, alphanumeric and have a max length of 50 characters. %s was discarded.'], [c.WARN_LOWERCASE_FLAGSET, '%s: flag set %s should be all lowercase - converting string to lowercase.'], diff --git a/src/logger/sdkLogger.ts b/src/logger/sdkLogger.ts index 41c3635a..42d9be42 100644 --- a/src/logger/sdkLogger.ts +++ b/src/logger/sdkLogger.ts @@ -1,14 +1,14 @@ import { LogLevels, isLogLevelString } from './index'; -import { ILoggerAPI } from '../types'; +import SplitIO from '../../types/splitio'; import { ILogger } from './types'; import { ERROR_LOGLEVEL_INVALID } from './constants'; /** * The public Logger utility API exposed via SplitFactory, used to update the log level. * - * @param log the factory logger instance to handle + * @param log - the factory logger instance to handle */ -export function createLoggerAPI(log: ILogger): ILoggerAPI { +export function createLoggerAPI(log: ILogger): SplitIO.ILoggerAPI { function setLogLevel(logLevel: string) { if (isLogLevelString(logLevel)) { @@ -27,7 +27,7 @@ export function createLoggerAPI(log: ILogger): ILoggerAPI { }, /** * Sets a custom log Level for the SDK. - * @param {string} logLevel - Custom LogLevel value. + * @param logLevel - Custom LogLevel value. 
*/ setLogLevel, /** diff --git a/src/logger/types.ts b/src/logger/types.ts index 79ec1b07..2f05b3ba 100644 --- a/src/logger/types.ts +++ b/src/logger/types.ts @@ -1,23 +1,21 @@ -import { LogLevel } from '../types'; +import SplitIO from '../../types/splitio'; export interface ILoggerOptions { prefix?: string, - logLevel?: LogLevel, + logLevel?: SplitIO.LogLevel, showLevel?: boolean, // @TODO remove this param eventually since it is not being set `false` anymore } -export interface ILogger { - setLogLevel(logLevel: LogLevel): void +export interface ILogger extends SplitIO.ILogger { + debug(msg: any): void; + debug(msg: string | number, args?: any[]): void; - debug(msg: any): void - debug(msg: string | number, args?: any[]): void + info(msg: any): void; + info(msg: string | number, args?: any[]): void; - info(msg: any): void - info(msg: string | number, args?: any[]): void + warn(msg: any): void; + warn(msg: string | number, args?: any[]): void; - warn(msg: any): void - warn(msg: string | number, args?: any[]): void - - error(msg: any): void - error(msg: string | number, args?: any[]): void + error(msg: any): void; + error(msg: string | number, args?: any[]): void; } diff --git a/src/readiness/__tests__/readinessManager.spec.ts b/src/readiness/__tests__/readinessManager.spec.ts index ae8b9389..e59a3cba 100644 --- a/src/readiness/__tests__/readinessManager.spec.ts +++ b/src/readiness/__tests__/readinessManager.spec.ts @@ -2,8 +2,20 @@ import { readinessManagerFactory } from '../readinessManager'; import { EventEmitter } from '../../utils/MinEvents'; import { IReadinessManager } from '../types'; import { SDK_READY, SDK_UPDATE, SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_READY_FROM_CACHE, SDK_SPLITS_CACHE_LOADED, SDK_READY_TIMED_OUT } from '../constants'; +import { ISettings } from '../../types'; + +const settings = { + startup: { + readyTimeout: 0, + } +} as unknown as ISettings; + +const settingsWithTimeout = { + startup: { + readyTimeout: 50 + } +} as unknown as 
ISettings; -const timeoutMs = 100; const statusFlagsCount = 7; function assertInitialStatus(readinessManager: IReadinessManager) { @@ -19,7 +31,7 @@ function assertInitialStatus(readinessManager: IReadinessManager) { test('READINESS MANAGER / Share splits but segments (without timeout enabled)', (done) => { expect.assertions(2 + statusFlagsCount * 2); - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); const readinessManager2 = readinessManager.shared(); assertInitialStatus(readinessManager); // all status flags must be false @@ -52,7 +64,7 @@ test('READINESS MANAGER / Share splits but segments (without timeout enabled)', }); test('READINESS MANAGER / Ready event should be fired once', () => { - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); let counter = 0; readinessManager.gate.on(SDK_READY, () => { @@ -71,7 +83,7 @@ test('READINESS MANAGER / Ready event should be fired once', () => { }); test('READINESS MANAGER / Ready from cache event should be fired once', (done) => { - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); let counter = 0; readinessManager.gate.on(SDK_READY_FROM_CACHE, () => { @@ -96,7 +108,7 @@ test('READINESS MANAGER / Ready from cache event should be fired once', (done) = }); test('READINESS MANAGER / Update event should be fired after the Ready event', () => { - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); let isReady = false; let counter = 0; @@ -123,7 +135,7 @@ test('READINESS MANAGER / Update event should be fired after the Ready event', ( test('READINESS MANAGER / Segment updates should not be propagated', (done) => { let updateCounter = 0; - const readinessManager = 
readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, settings); const readinessManager2 = readinessManager.shared(); readinessManager2.gate.on(SDK_UPDATE, () => { @@ -145,13 +157,14 @@ test('READINESS MANAGER / Segment updates should not be propagated', (done) => { }); }); -describe('READINESS MANAGER / Timeout ready event', () => { +describe('READINESS MANAGER / Timeout event', () => { let readinessManager: IReadinessManager; let timeoutCounter: number; beforeEach(() => { // Schedule timeout to be fired before SDK_READY - readinessManager = readinessManagerFactory(EventEmitter, 10); + readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); + readinessManager.init(); // Start the timeout timeoutCounter = 0; readinessManager.gate.on(SDK_READY_TIMED_OUT, () => { @@ -163,7 +176,7 @@ describe('READINESS MANAGER / Timeout ready event', () => { setTimeout(() => { readinessManager.splits.emit(SDK_SPLITS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - }, 20); + }, settingsWithTimeout.startup.readyTimeout + 20); }); test('should be fired once', (done) => { @@ -199,7 +212,8 @@ test('READINESS MANAGER / Cancel timeout if ready fired', (done) => { let sdkReadyCalled = false; let sdkReadyTimedoutCalled = false; - const readinessManager = readinessManagerFactory(EventEmitter, timeoutMs); + const readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); + readinessManager.init(); // Start the timeout readinessManager.gate.on(SDK_READY_TIMED_OUT, () => { sdkReadyTimedoutCalled = true; }); readinessManager.gate.once(SDK_READY, () => { sdkReadyCalled = true; }); @@ -209,16 +223,16 @@ test('READINESS MANAGER / Cancel timeout if ready fired', (done) => { expect(sdkReadyTimedoutCalled).toBeFalsy(); expect(sdkReadyCalled).toBeTruthy(); done(); - }, timeoutMs * 3); + }, settingsWithTimeout.startup.readyTimeout * 3); setTimeout(() => { 
readinessManager.splits.emit(SDK_SPLITS_ARRIVED); readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - }, timeoutMs * 0.8); + }, settingsWithTimeout.startup.readyTimeout * 0.8); }); test('READINESS MANAGER / Destroy after it was ready but before timedout', () => { - const readinessManager = readinessManagerFactory(EventEmitter, timeoutMs); + const readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); let counter = 0; @@ -253,7 +267,7 @@ test('READINESS MANAGER / Destroy after it was ready but before timedout', () => }); test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { - const readinessManager = readinessManagerFactory(EventEmitter, timeoutMs); + const readinessManager = readinessManagerFactory(EventEmitter, settingsWithTimeout); readinessManager.gate.on(SDK_READY, () => { throw new Error('SDK_READY should have not been emitted'); @@ -265,7 +279,7 @@ test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { setTimeout(() => { readinessManager.destroy(); // Destroy the gate, removing all the listeners and clearing the ready timeout. 
- }, timeoutMs * 0.5); + }, settingsWithTimeout.startup.readyTimeout * 0.5); setTimeout(() => { readinessManager.splits.emit(SDK_SPLITS_ARRIVED); @@ -273,6 +287,6 @@ test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => { expect('Calling destroy should have removed the readyTimeout and the test should end now.'); done(); - }, timeoutMs * 1.5); + }, settingsWithTimeout.startup.readyTimeout * 1.5); }); diff --git a/src/readiness/__tests__/sdkReadinessManager.spec.ts b/src/readiness/__tests__/sdkReadinessManager.spec.ts index 4264341e..17d98102 100644 --- a/src/readiness/__tests__/sdkReadinessManager.spec.ts +++ b/src/readiness/__tests__/sdkReadinessManager.spec.ts @@ -1,10 +1,11 @@ // @ts-nocheck import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; -import { IEventEmitter } from '../../types'; +import SplitIO from '../../../types/splitio'; import { SDK_READY, SDK_READY_FROM_CACHE, SDK_READY_TIMED_OUT, SDK_UPDATE } from '../constants'; import { sdkReadinessManagerFactory } from '../sdkReadinessManager'; import { IReadinessManager } from '../types'; import { ERROR_CLIENT_LISTENER, CLIENT_READY_FROM_CACHE, CLIENT_READY, CLIENT_NO_LISTENER } from '../../logger/constants'; +import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; const EventEmitterMock = jest.fn(() => ({ on: jest.fn(), @@ -14,7 +15,7 @@ const EventEmitterMock = jest.fn(() => ({ addListener: jest.fn(), off: jest.fn(), removeListener: jest.fn() -})) as new () => IEventEmitter; +})) as new () => SplitIO.IEventEmitter; // Makes readinessManager emit SDK_READY & update isReady flag function emitReadyEvent(readinessManager: IReadinessManager) { @@ -40,7 +41,7 @@ describe('SDK Readiness Manager - Event emitter', () => { test('Providing the gate object to get the SDK status interface that manages events', () => { expect(typeof sdkReadinessManagerFactory).toBe('function'); // The module exposes a function. 
- const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); expect(typeof sdkReadinessManager).toBe('object'); // The function result contains the readiness manager and a sdkStatus object. const gateMock = sdkReadinessManager.readinessManager.gate; const sdkStatus = sdkReadinessManager.sdkStatus; @@ -80,7 +81,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - SDK_READY_FROM_CACHE', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const gateMock = sdkReadinessManager.readinessManager.gate; const readyFromCacheEventCB = gateMock.once.mock.calls[2][1]; @@ -90,7 +91,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - SDK_READY emits with no callbacks', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); // Get the callbacks const addListenerCB = sdkReadinessManager.readinessManager.gate.on.mock.calls[1][1]; @@ -116,7 +117,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - SDK_READY emits with callbacks', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); // Get the callbacks const addListenerCB = sdkReadinessManager.readinessManager.gate.on.mock.calls[1][1]; @@ -134,7 +135,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - If we end up removing the listeners for SDK_READY, it behaves as if 
it had none', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const gateMock = sdkReadinessManager.readinessManager.gate; // Get the callbacks @@ -154,7 +155,7 @@ describe('SDK Readiness Manager - Event emitter', () => { }); test('The event callbacks should work as expected - If we end up removing the listeners for SDK_READY, it behaves as if it had none', () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const gateMock = sdkReadinessManager.readinessManager.gate; // Get the callbacks @@ -176,7 +177,7 @@ describe('SDK Readiness Manager - Event emitter', () => { test('The event callbacks should work as expected - SDK_READY emits with expected internal callbacks', () => { // the sdkReadinessManager expects more than one SDK_READY callback to not log the "No listeners" warning - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); sdkReadinessManager.incInternalReadyCbCount(); const gateMock = sdkReadinessManager.readinessManager.gate; @@ -201,7 +202,7 @@ describe('SDK Readiness Manager - Event emitter', () => { describe('SDK Readiness Manager - Ready promise', () => { test('.ready() promise behaviour for clients', async () => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const ready = sdkReadinessManager.sdkStatus.ready(); expect(ready instanceof Promise).toBe(true); // It should return a promise. @@ -230,7 +231,7 @@ describe('SDK Readiness Manager - Ready promise', () => { // control assertion. stubs already reset. 
expect(testPassedCount).toBe(2); - const sdkReadinessManagerForTimedout = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManagerForTimedout = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const readyForTimeout = sdkReadinessManagerForTimedout.sdkStatus.ready(); @@ -269,7 +270,7 @@ describe('SDK Readiness Manager - Ready promise', () => { }); test('Full blown ready promise count as a callback and resolves on SDK_READY', (done) => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); const readyPromise = sdkReadinessManager.sdkStatus.ready(); // Get the callback @@ -291,7 +292,7 @@ describe('SDK Readiness Manager - Ready promise', () => { }); test('.ready() rejected promises have a default onRejected handler that just logs the error', (done) => { - const sdkReadinessManager = sdkReadinessManagerFactory(loggerMock, EventEmitterMock); + const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitterMock, fullSettings); let readyForTimeout = sdkReadinessManager.sdkStatus.ready(); emitTimeoutEvent(sdkReadinessManager.readinessManager); // make the SDK "timed out" diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index b640d751..e8f92dce 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -1,16 +1,19 @@ import { objectAssign } from '../utils/lang/objectAssign'; -import { IEventEmitter } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { SDK_SPLITS_ARRIVED, SDK_SPLITS_CACHE_LOADED, SDK_SEGMENTS_ARRIVED, SDK_READY_TIMED_OUT, SDK_READY_FROM_CACHE, SDK_UPDATE, SDK_READY } from './constants'; import { IReadinessEventEmitter, IReadinessManager, ISegmentsEventEmitter, ISplitsEventEmitter } from './types'; -function splitsEventEmitterFactory(EventEmitter: new () => IEventEmitter): 
ISplitsEventEmitter { +function splitsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter): ISplitsEventEmitter { const splitsEventEmitter = objectAssign(new EventEmitter(), { splitsArrived: false, splitsCacheLoaded: false, + hasInit: false, + initCallbacks: [] }); // `isSplitKill` condition avoids an edge-case of wrongly emitting SDK_READY if: - // - `/mySegments` fetch and SPLIT_KILL occurs before `/splitChanges` fetch, and + // - `/memberships` fetch and SPLIT_KILL occurs before `/splitChanges` fetch, and // - storage has cached splits (for which case `splitsStorage.killLocally` can return true) splitsEventEmitter.on(SDK_SPLITS_ARRIVED, (isSplitKill: boolean) => { if (!isSplitKill) splitsEventEmitter.splitsArrived = true; }); splitsEventEmitter.once(SDK_SPLITS_CACHE_LOADED, () => { splitsEventEmitter.splitsCacheLoaded = true; }); @@ -18,7 +21,7 @@ function splitsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISpli return splitsEventEmitter; } -function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISegmentsEventEmitter { +function segmentsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter): ISegmentsEventEmitter { const segmentsEventEmitter = objectAssign(new EventEmitter(), { segmentsArrived: false }); @@ -32,10 +35,12 @@ function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISe * Factory of readiness manager, which handles the ready / update event propagation. 
*/ export function readinessManagerFactory( - EventEmitter: new () => IEventEmitter, - readyTimeout = 0, + EventEmitter: new () => SplitIO.IEventEmitter, + settings: ISettings, splits: ISplitsEventEmitter = splitsEventEmitterFactory(EventEmitter)): IReadinessManager { + const readyTimeout = settings.startup.readyTimeout; + const segments: ISegmentsEventEmitter = segmentsEventEmitterFactory(EventEmitter); const gate: IReadinessEventEmitter = new EventEmitter(); @@ -54,8 +59,8 @@ export function readinessManagerFactory( // emit SDK_READY_TIMED_OUT let hasTimedout = false; - function timeout() { - if (hasTimedout) return; + function timeout() { // eslint-disable-next-line no-use-before-define + if (hasTimedout || isReady) return; hasTimedout = true; syncLastUpdate(); gate.emit(SDK_READY_TIMED_OUT, 'Split SDK emitted SDK_READY_TIMED_OUT event.'); @@ -63,7 +68,8 @@ export function readinessManagerFactory( let readyTimeoutId: ReturnType; if (readyTimeout > 0) { - readyTimeoutId = setTimeout(timeout, readyTimeout); + if (splits.hasInit) readyTimeoutId = setTimeout(timeout, readyTimeout); + else splits.initCallbacks.push(() => { readyTimeoutId = setTimeout(timeout, readyTimeout); }); } // emit SDK_READY and SDK_UPDATE @@ -118,9 +124,9 @@ export function readinessManagerFactory( segments, gate, - shared(readyTimeout = 0) { + shared() { refCount++; - return readinessManagerFactory(EventEmitter, readyTimeout, splits); + return readinessManagerFactory(EventEmitter, settings, splits); }, // @TODO review/remove next methods when non-recoverable errors are reworked @@ -130,6 +136,12 @@ export function readinessManagerFactory( // tracking and evaluations, while keeping event listeners to emit SDK_READY_TIMED_OUT event setDestroyed() { isDestroyed = true; }, + init() { + if (splits.hasInit) return; + splits.hasInit = true; + splits.initCallbacks.forEach(cb => cb()); + }, + destroy() { isDestroyed = true; syncLastUpdate(); diff --git a/src/readiness/sdkReadinessManager.ts 
b/src/readiness/sdkReadinessManager.ts index 3d40f952..ee558d47 100644 --- a/src/readiness/sdkReadinessManager.ts +++ b/src/readiness/sdkReadinessManager.ts @@ -2,9 +2,9 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { promiseWrapper } from '../utils/promise/wrapper'; import { readinessManagerFactory } from './readinessManager'; import { ISdkReadinessManager } from './types'; -import { IEventEmitter } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { SDK_READY, SDK_READY_TIMED_OUT, SDK_READY_FROM_CACHE, SDK_UPDATE } from './constants'; -import { ILogger } from '../logger/types'; import { ERROR_CLIENT_LISTENER, CLIENT_READY_FROM_CACHE, CLIENT_READY, CLIENT_NO_LISTENER } from '../logger/constants'; const NEW_LISTENER_EVENT = 'newListener'; @@ -14,14 +14,15 @@ const REMOVE_LISTENER_EVENT = 'removeListener'; /** * SdkReadinessManager factory, which provides the public status API of SDK clients and manager: ready promise, readiness event emitter and constants (SDK_READY, etc). * It also updates logs related warnings and errors. * - * @param readyTimeout time in millis to emit SDK_READY_TIME_OUT event - * @param readinessManager optional readinessManager to use. only used internally for `shared` method + * @param settings - SDK settings; `startup.readyTimeout` is the time in millis to emit the SDK_READY_TIMED_OUT event + * @param readinessManager - optional readinessManager to use. 
only used internally for `shared` method */ export function sdkReadinessManagerFactory( - log: ILogger, - EventEmitter: new () => IEventEmitter, - readyTimeout = 0, - readinessManager = readinessManagerFactory(EventEmitter, readyTimeout)): ISdkReadinessManager { + EventEmitter: new () => SplitIO.IEventEmitter, + settings: ISettings, + readinessManager = readinessManagerFactory(EventEmitter, settings)): ISdkReadinessManager { + + const log = settings.log; /** Ready callback warning */ let internalReadyCbCount = 0; @@ -72,8 +73,8 @@ export function sdkReadinessManagerFactory( return { readinessManager, - shared(readyTimeout = 0) { - return sdkReadinessManagerFactory(log, EventEmitter, readyTimeout, readinessManager.shared(readyTimeout)); + shared() { + return sdkReadinessManagerFactory(EventEmitter, settings, readinessManager.shared()); }, incInternalReadyCbCount() { @@ -91,25 +92,7 @@ export function sdkReadinessManagerFactory( SDK_UPDATE, SDK_READY_TIMED_OUT, }, - /** - * Returns a promise that will be resolved once the SDK has finished loading (SDK_READY event emitted) or rejected if the SDK has timedout (SDK_READY_TIMED_OUT event emitted). - * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, calling the `ready` method after the - * SDK had timed out will return a new promise that should eventually resolve if the SDK gets ready. - * - * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises. 
- * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example: - * ``` - * try { - * await client.ready().catch((e) => { throw e; }); - * // SDK is ready - * } catch(e) { - * // SDK has timedout - * } - * ``` - * - * @function ready - * @returns {Promise} - */ + ready() { if (readinessManager.hasTimedout()) { if (!readinessManager.isReady()) { diff --git a/src/readiness/types.ts b/src/readiness/types.ts index f0bbdb7a..df3c2603 100644 --- a/src/readiness/types.ts +++ b/src/readiness/types.ts @@ -1,4 +1,5 @@ -import { IEventEmitter, IStatusInterface } from '../types'; +import { IStatusInterface } from '../types'; +import SplitIO from '../../types/splitio'; /** Splits data emitter */ @@ -6,12 +7,14 @@ type SDK_SPLITS_ARRIVED = 'state::splits-arrived' type SDK_SPLITS_CACHE_LOADED = 'state::splits-cache-loaded' type ISplitsEvent = SDK_SPLITS_ARRIVED | SDK_SPLITS_CACHE_LOADED -export interface ISplitsEventEmitter extends IEventEmitter { +export interface ISplitsEventEmitter extends SplitIO.IEventEmitter { emit(event: ISplitsEvent, ...args: any[]): boolean on(event: ISplitsEvent, listener: (...args: any[]) => void): this; once(event: ISplitsEvent, listener: (...args: any[]) => void): this; splitsArrived: boolean splitsCacheLoaded: boolean + hasInit: boolean, + initCallbacks: (() => void)[] } /** Segments data emitter */ @@ -19,7 +22,7 @@ export interface ISplitsEventEmitter extends IEventEmitter { type SDK_SEGMENTS_ARRIVED = 'state::segments-arrived' type ISegmentsEvent = SDK_SEGMENTS_ARRIVED -export interface ISegmentsEventEmitter extends IEventEmitter { +export interface ISegmentsEventEmitter extends SplitIO.IEventEmitter { emit(event: ISegmentsEvent, ...args: any[]): boolean on(event: ISegmentsEvent, listener: (...args: any[]) => void): this; once(event: ISegmentsEvent, listener: (...args: any[]) => void): this; @@ -35,7 +38,7 @@ export type SDK_UPDATE = 'state::update' export type SDK_DESTROY = 
'state::destroy' export type IReadinessEvent = SDK_READY_TIMED_OUT | SDK_READY | SDK_READY_FROM_CACHE | SDK_UPDATE | SDK_DESTROY -export interface IReadinessEventEmitter extends IEventEmitter { +export interface IReadinessEventEmitter extends SplitIO.IEventEmitter { emit(event: IReadinessEvent, ...args: any[]): boolean } @@ -59,9 +62,10 @@ export interface IReadinessManager { timeout(): void, setDestroyed(): void, destroy(): void, + init(): void, /** for client-side */ - shared(readyTimeout?: number): IReadinessManager, + shared(): IReadinessManager, } /** SDK readiness manager */ @@ -77,5 +81,5 @@ export interface ISdkReadinessManager { incInternalReadyCbCount(): void /** for client-side */ - shared(readyTimeout?: number): ISdkReadinessManager + shared(): ISdkReadinessManager } diff --git a/src/sdkClient/__tests__/sdkClientMethod.spec.ts b/src/sdkClient/__tests__/sdkClientMethod.spec.ts index 27be5258..068d0278 100644 --- a/src/sdkClient/__tests__/sdkClientMethod.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethod.spec.ts @@ -3,6 +3,7 @@ import { CONSUMER_MODE, STANDALONE_MODE } from '../../utils/constants'; import { sdkClientMethodFactory } from '../sdkClientMethod'; import { assertClientApi } from './testUtils'; import { telemetryTrackerFactory } from '../../trackers/telemetryTracker'; +import { IBasicClient } from '../../types'; const errorMessage = 'Shared Client not supported by the storage mechanism. 
Create isolated instances instead.'; @@ -14,7 +15,8 @@ const paramMocks = [ sdkReadinessManager: { sdkStatus: jest.fn(), readinessManager: { destroy: jest.fn() } }, signalListener: undefined, settings: { mode: CONSUMER_MODE, log: loggerMock, core: { authorizationKey: 'sdk key '} }, - telemetryTracker: telemetryTrackerFactory() + telemetryTracker: telemetryTrackerFactory(), + clients: {} }, // SyncManager (i.e., Sync SDK) and Signal listener { @@ -23,7 +25,8 @@ const paramMocks = [ sdkReadinessManager: { sdkStatus: jest.fn(), readinessManager: { destroy: jest.fn() } }, signalListener: { stop: jest.fn() }, settings: { mode: STANDALONE_MODE, log: loggerMock, core: { authorizationKey: 'sdk key '} }, - telemetryTracker: telemetryTrackerFactory() + telemetryTracker: telemetryTrackerFactory(), + clients: {} } ]; @@ -35,7 +38,7 @@ test.each(paramMocks)('sdkClientMethodFactory', (params, done: any) => { expect(typeof sdkClientMethod).toBe('function'); // calling the function should return a client instance - const client = sdkClientMethod(); + const client = sdkClientMethod() as unknown as IBasicClient; assertClientApi(client, params.sdkReadinessManager.sdkStatus); // multiple calls should return the same instance diff --git a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts index dbe57ec9..8822a21f 100644 --- a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts @@ -1,8 +1,7 @@ -import { sdkClientMethodCSFactory as sdkClientMethodCSWithTTFactory } from '../sdkClientMethodCSWithTT'; import { sdkClientMethodCSFactory } from '../sdkClientMethodCS'; import { assertClientApi } from './testUtils'; import { telemetryTrackerFactory } from '../../trackers/telemetryTracker'; -import { settingsWithKey, settingsWithKeyAndTT, settingsWithKeyObject } from '../../utils/settingsValidation/__tests__/settings.mocks'; +import { settingsWithKey, settingsWithKeyObject } from 
'../../utils/settingsValidation/__tests__/settings.mocks'; const partialStorages: { destroy: jest.Mock }[] = []; @@ -14,15 +13,15 @@ const storageMock = { }) }; -const partialSdkReadinessManagers: { sdkStatus: jest.Mock, readinessManager: { destroy: jest.Mock } }[] = []; +const partialSdkReadinessManagers: { sdkStatus: jest.Mock, readinessManager: { init: jest.Mock, destroy: jest.Mock } }[] = []; const sdkReadinessManagerMock = { sdkStatus: jest.fn(), - readinessManager: { destroy: jest.fn() }, + readinessManager: { init: jest.fn(), destroy: jest.fn() }, shared: jest.fn(() => { partialSdkReadinessManagers.push({ sdkStatus: jest.fn(), - readinessManager: { destroy: jest.fn() }, + readinessManager: { init: jest.fn(), destroy: jest.fn() }, }); return partialSdkReadinessManagers[partialSdkReadinessManagers.length - 1]; }) @@ -45,7 +44,8 @@ const params = { syncManager: syncManagerMock, signalListener: { stop: jest.fn() }, settings: settingsWithKey, - telemetryTracker: telemetryTrackerFactory() + telemetryTracker: telemetryTrackerFactory(), + clients: {}, }; const invalidAttributes = [ @@ -71,15 +71,10 @@ describe('sdkClientMethodCSFactory', () => { partialStorages.length = 0; partialSdkReadinessManagers.length = 0; partialSyncManagers.length = 0; + params.clients = {}; }); - // list of factory functions and their types (whether it ignores TT or not) - const testTargets = [ - [sdkClientMethodCSWithTTFactory, false], - [sdkClientMethodCSFactory, true] - ]; - - test.each(testTargets)('main client', (sdkClientMethodCSFactory) => { + test('main client', () => { // @ts-expect-error const sdkClientMethod = sdkClientMethodCSFactory(params); @@ -104,21 +99,20 @@ describe('sdkClientMethodCSFactory', () => { }); - test.each(testTargets)('multiple clients', async (sdkClientMethodCSFactory, ignoresTT) => { + test('multiple clients', async () => { // @ts-expect-error const sdkClientMethod = sdkClientMethodCSFactory(params); - // calling the function with a diferent key than 
settings, should return a new client instance + // calling the function with a different key than settings, should return a new client instance const newClients = new Set([ - sdkClientMethod('other-key'), // new client - sdkClientMethod('other-key', 'other-tt'), // new client - sdkClientMethod({ matchingKey: 'other-key', bucketingKey: 'buck' }) // new client + sdkClientMethod('other-key'), // @ts-expect-error + sdkClientMethod('other-key', 'ignored-tt'), + sdkClientMethod({ matchingKey: 'other-key', bucketingKey: 'buck' }) ]); - if (ignoresTT) expect(newClients.size).toBe(2); - else expect(newClients.size).toBe(3); + expect(newClients.size).toBe(2); - // each new client must follog the Client API + // each new client must follow the Client API newClients.forEach(newClient => { assertClientApi(newClient); expect(newClient).not.toBe(sdkClientMethod()); @@ -148,7 +142,7 @@ describe('sdkClientMethodCSFactory', () => { }); - test.each(testTargets)('return main client instance if called with same key', (sdkClientMethodCSFactory) => { + test('returns main client instance if called with same key', () => { params.settings = settingsWithKey; // @ts-expect-error @@ -161,20 +155,7 @@ describe('sdkClientMethodCSFactory', () => { expect(params.syncManager.shared).not.toBeCalled(); }); - test.each(testTargets)('return main client instance if called with same key and TT', (sdkClientMethodCSFactory) => { - - params.settings = settingsWithKeyAndTT; - // @ts-expect-error - const sdkClientMethod = sdkClientMethodCSFactory(params); - - expect(sdkClientMethod()).toBe(sdkClientMethod(settingsWithKeyAndTT.core.key, settingsWithKeyAndTT.core.trafficType)); - - expect(params.storage.shared).not.toBeCalled(); - expect(params.sdkReadinessManager.shared).not.toBeCalled(); - expect(params.syncManager.shared).not.toBeCalled(); - }); - - test.each(testTargets)('return main client instance if called with same key object', (sdkClientMethodCSFactory) => { + test('returns main client instance if 
called with same key object', () => { // @ts-expect-error params.settings = settingsWithKeyObject; // @ts-expect-error @@ -187,39 +168,37 @@ describe('sdkClientMethodCSFactory', () => { expect(params.syncManager.shared).not.toBeCalled(); }); - test.each(testTargets)('return same client instance if called with same key or traffic type (input validation)', (sdkClientMethodCSFactory, ignoresTT) => { + test('returns same client instance if called with same key (input validation)', () => { // @ts-expect-error const sdkClientMethod = sdkClientMethodCSFactory(params); - const clientInstance = sdkClientMethod('key', 'tt'); + const clientInstance = sdkClientMethod('key'); - expect(sdkClientMethod('key', 'tT')).toBe(clientInstance); // No new client created: TT is lowercased / ignored - expect(sdkClientMethod(' key ', 'tt')).toBe(clientInstance); // No new client created: key is trimmed - expect(sdkClientMethod({ matchingKey: 'key ', bucketingKey: ' key' }, 'TT')).toBe(clientInstance); // No new client created: key object is equivalent to 'key' string + expect(sdkClientMethod('key')).toBe(clientInstance); // No new client created: same key + expect(sdkClientMethod(' key ')).toBe(clientInstance); // No new client created: key is trimmed + expect(sdkClientMethod({ matchingKey: 'key ', bucketingKey: ' key' })).toBe(clientInstance); // No new client created: key object is equivalent to 'key' string expect(params.storage.shared).toBeCalledTimes(1); expect(params.sdkReadinessManager.shared).toBeCalledTimes(1); expect(params.syncManager.shared).toBeCalledTimes(1); - expect(sdkClientMethod('KEY', 'tt')).not.toBe(clientInstance); // New client created: key is case-sensitive - if (!ignoresTT) expect(sdkClientMethod('key', 'TT ')).not.toBe(clientInstance); // New client created: TT is not trimmed + expect(sdkClientMethod('KEY')).not.toBe(clientInstance); // New client created: key is case-sensitive - const clientCount = ignoresTT ? 
2 : 3; + const clientCount = 2; expect(params.storage.shared).toBeCalledTimes(clientCount); expect(params.sdkReadinessManager.shared).toBeCalledTimes(clientCount); expect(params.syncManager.shared).toBeCalledTimes(clientCount); }); - test.each(testTargets)('invalid calls throw an error', (sdkClientMethodCSFactory, ignoresTT) => { + test('invalid calls throw an error', () => { // @ts-expect-error - const sdkClientMethod = sdkClientMethodCSFactory(params); + const sdkClientMethod = sdkClientMethodCSFactory(params); // @ts-expect-error expect(() => sdkClientMethod({ matchingKey: settingsWithKey.core.key, bucketingKey: undefined })).toThrow('Shared Client needs a valid key.'); - if (!ignoresTT) expect(() => sdkClientMethod('valid-key', ['invalid-TT'])).toThrow('Shared Client needs a valid traffic type or no traffic type at all.'); }); - test.each(testTargets)('attributes binding - main client', (sdkClientMethodCSFactory) => { + test('attributes binding - main client', () => { // @ts-expect-error - const sdkClientMethod = sdkClientMethodCSFactory(params); + const sdkClientMethod = sdkClientMethodCSFactory(params) as any; // should return a function expect(typeof sdkClientMethod).toBe('function'); @@ -271,7 +250,7 @@ describe('sdkClientMethodCSFactory', () => { }); - test.each(testTargets)('attributes binding - shared clients', (sdkClientMethodCSFactory) => { + test('attributes binding - shared clients', () => { // @ts-expect-error const sdkClientMethod = sdkClientMethodCSFactory(params); diff --git a/src/sdkClient/client.ts b/src/sdkClient/client.ts index 98073e9d..01c5053d 100644 --- a/src/sdkClient/client.ts +++ b/src/sdkClient/client.ts @@ -6,7 +6,7 @@ import { validateTrafficTypeExistence } from '../utils/inputValidation/trafficTy import { SDK_NOT_READY } from '../utils/labels'; import { CONTROL, TREATMENT, TREATMENTS, TREATMENT_WITH_CONFIG, TREATMENTS_WITH_CONFIG, TRACK, TREATMENTS_WITH_CONFIG_BY_FLAGSETS, TREATMENTS_BY_FLAGSETS, TREATMENTS_BY_FLAGSET, 
TREATMENTS_WITH_CONFIG_BY_FLAGSET, GET_TREATMENTS_WITH_CONFIG, GET_TREATMENTS_BY_FLAG_SETS, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, GET_TREATMENTS_BY_FLAG_SET, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SET, GET_TREATMENT_WITH_CONFIG, GET_TREATMENT, GET_TREATMENTS, TRACK_FN_LABEL } from '../utils/constants'; import { IEvaluationResult } from '../evaluator/types'; -import { SplitIO, ImpressionDTO } from '../types'; +import SplitIO from '../../types/splitio'; import { IMPRESSION, IMPRESSION_QUEUEING } from '../logger/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; import { isConsumerMode } from '../utils/settingsValidation/mode'; @@ -34,7 +34,7 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl const stopTelemetryTracker = telemetryTracker.trackEval(withConfig ? TREATMENT_WITH_CONFIG : TREATMENT); const wrapUp = (evaluationResult: IEvaluationResult) => { - const queue: ImpressionDTO[] = []; + const queue: SplitIO.ImpressionDTO[] = []; const treatment = processEvaluation(evaluationResult, featureFlagName, key, attributes, withConfig, methodName, queue); impressionsTracker.track(queue, attributes); @@ -59,7 +59,7 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl const stopTelemetryTracker = telemetryTracker.trackEval(withConfig ? 
TREATMENTS_WITH_CONFIG : TREATMENTS); const wrapUp = (evaluationResults: Record) => { - const queue: ImpressionDTO[] = []; + const queue: SplitIO.ImpressionDTO[] = []; const treatments: Record = {}; Object.keys(evaluationResults).forEach(featureFlagName => { treatments[featureFlagName] = processEvaluation(evaluationResults[featureFlagName], featureFlagName, key, attributes, withConfig, methodName, queue); @@ -87,7 +87,7 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl const stopTelemetryTracker = telemetryTracker.trackEval(method); const wrapUp = (evaluationResults: Record) => { - const queue: ImpressionDTO[] = []; + const queue: SplitIO.ImpressionDTO[] = []; const treatments: Record = {}; const evaluations = evaluationResults; Object.keys(evaluations).forEach(featureFlagName => { @@ -128,7 +128,7 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl attributes: SplitIO.Attributes | undefined, withConfig: boolean, invokingMethodName: string, - queue: ImpressionDTO[] + queue: SplitIO.ImpressionDTO[] ): SplitIO.Treatment | SplitIO.TreatmentWithConfig { const matchingKey = getMatching(key); const bucketingKey = getBucketing(key); @@ -199,6 +199,5 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl getTreatmentsByFlagSet, getTreatmentsWithConfigByFlagSet, track, - isClientSide: false } as SplitIO.IClient | SplitIO.IAsyncClient; } diff --git a/src/sdkClient/clientAttributesDecoration.ts b/src/sdkClient/clientAttributesDecoration.ts index 57413ad9..cf31b5d3 100644 --- a/src/sdkClient/clientAttributesDecoration.ts +++ b/src/sdkClient/clientAttributesDecoration.ts @@ -1,6 +1,6 @@ import { AttributesCacheInMemory } from '../storages/inMemory/AttributesCacheInMemory'; import { validateAttributesDeep } from '../utils/inputValidation/attributes'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { ILogger } from '../logger/types'; import { 
objectAssign } from '../utils/lang/objectAssign'; @@ -80,9 +80,9 @@ export function clientAttributesDecoration = {}; @@ -95,8 +95,8 @@ export function clientAttributesDecoration) { @@ -117,7 +117,7 @@ export function clientAttributesDecoration { - // Cleanup event listeners - signalListener && signalListener.stop(); - - // @TODO stop only if last client is destroyed - if (uniqueKeysTracker) uniqueKeysTracker.stop(); + // For main client, cleanup event listeners and scheduled jobs + if (!isSharedClient) { + signalListener && signalListener.stop(); + uniqueKeysTracker && uniqueKeysTracker.stop(); + } // Cleanup storage return storage.destroy(); diff --git a/src/sdkClient/sdkClientMethod.ts b/src/sdkClient/sdkClientMethod.ts index 9cd117ea..e2a4112e 100644 --- a/src/sdkClient/sdkClientMethod.ts +++ b/src/sdkClient/sdkClientMethod.ts @@ -1,15 +1,18 @@ -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { sdkClientFactory } from './sdkClient'; import { RETRIEVE_CLIENT_DEFAULT } from '../logger/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; /** - * Factory of client method for server-side SDKs (ISDK and IAsyncSDK) + * Factory of client method for server-side SDKs */ export function sdkClientMethodFactory(params: ISdkFactoryContext): () => SplitIO.IClient | SplitIO.IAsyncClient { const log = params.settings.log; const clientInstance = sdkClientFactory(params); + // Only one client in server-side without bound key + params.clients[''] = clientInstance; + return function client() { if (arguments.length > 0) { throw new Error('Shared Client not supported by the storage mechanism. 
Create isolated instances instead.'); diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 4fd0d626..ebc755a1 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -1,5 +1,5 @@ import { clientCSDecorator } from './clientCS'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { validateKey } from '../utils/inputValidation/key'; import { getMatching, keyParser } from '../utils/key'; import { sdkClientFactory } from './sdkClient'; @@ -8,18 +8,14 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, LOG_PREFIX_CLIENT_INSTANTIATION } from '../logger/constants'; import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; - -function buildInstanceId(key: SplitIO.SplitKey) { - // @ts-ignore - return `${key.matchingKey ? key.matchingKey : key}-${key.bucketingKey ? key.bucketingKey : key}-`; -} +import { buildInstanceId } from './identity'; /** * Factory of client method for the client-side API variant where TT is ignored. * Therefore, clients don't have a bound TT for the track method. */ -export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.ICsClient { - const { storage, syncManager, sdkReadinessManager, settings: { core: { key }, startup: { readyTimeout }, log } } = params; +export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.IBrowserClient { + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log } } = params; const mainClientInstance = clientCSDecorator( log, @@ -31,8 +27,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl const defaultInstanceId = buildInstanceId(parsedDefaultKey); // Cache instances created per factory. 
- const clientInstances: Record = {}; - clientInstances[defaultInstanceId] = mainClientInstance; + clients[defaultInstanceId] = mainClientInstance; return function client(key?: SplitIO.SplitKey) { if (key === undefined) { @@ -40,7 +35,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl return mainClientInstance; } - // Validate the key value. The trafficType (2nd argument) is ignored + // Validate the key value const validKey = validateKey(log, key, LOG_PREFIX_CLIENT_INSTANTIATION); if (validKey === false) { throw new Error('Shared Client needs a valid key.'); @@ -48,10 +43,10 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl const instanceId = buildInstanceId(validKey); - if (!clientInstances[instanceId]) { + if (!clients[instanceId]) { const matchingKey = getMatching(validKey); - const sharedSdkReadiness = sdkReadinessManager.shared(readyTimeout); + const sharedSdkReadiness = sdkReadinessManager.shared(); const sharedStorage = storage.shared && storage.shared(matchingKey, (err) => { if (err) { sharedSdkReadiness.readinessManager.timeout(); @@ -70,24 +65,21 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl // As shared clients reuse all the storage information, we don't need to check here if we // will use offline or online mode. We should stick with the original decision. 
- clientInstances[instanceId] = clientCSDecorator( + clients[instanceId] = clientCSDecorator( log, sdkClientFactory(objectAssign({}, params, { sdkReadinessManager: sharedSdkReadiness, storage: sharedStorage || storage, syncManager: sharedSyncManager, - signalListener: undefined, // only the main client "destroy" method stops the signal listener }), true) as SplitIO.IClient, validKey ); - sharedSyncManager && sharedSyncManager.start(); - log.info(NEW_SHARED_CLIENT); } else { log.debug(RETRIEVE_CLIENT_EXISTING); } - return clientInstances[instanceId]; + return clients[instanceId] as SplitIO.IBrowserClient; }; } diff --git a/src/sdkClient/sdkClientMethodCSWithTT.ts b/src/sdkClient/sdkClientMethodCSWithTT.ts deleted file mode 100644 index fb4aa77e..00000000 --- a/src/sdkClient/sdkClientMethodCSWithTT.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { clientCSDecorator } from './clientCS'; -import { SplitIO } from '../types'; -import { validateKey } from '../utils/inputValidation/key'; -import { validateTrafficType } from '../utils/inputValidation/trafficType'; -import { getMatching, keyParser } from '../utils/key'; -import { sdkClientFactory } from './sdkClient'; -import { ISyncManagerCS } from '../sync/types'; -import { objectAssign } from '../utils/lang/objectAssign'; -import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, LOG_PREFIX_CLIENT_INSTANTIATION } from '../logger/constants'; -import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; -import { ISdkFactoryContext } from '../sdkFactory/types'; - -function buildInstanceId(key: SplitIO.SplitKey, trafficType?: string) { - // @ts-ignore - return `${key.matchingKey ? key.matchingKey : key}-${key.bucketingKey ? key.bucketingKey : key}-${trafficType !== undefined ? 
trafficType : ''}`; -} - -/** - * Factory of client method for the client-side (browser) variant of the Isomorphic JS SDK, - * where clients can have a bound TT for the track method, which is provided via the settings - * (default client) or the client method (shared clients). - */ -export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey, trafficType?: string) => SplitIO.ICsClient { - const { storage, syncManager, sdkReadinessManager, settings: { core: { key, trafficType }, startup: { readyTimeout }, log } } = params; - - const mainClientInstance = clientCSDecorator( - log, - sdkClientFactory(params) as SplitIO.IClient, - key, - trafficType - ); - - const parsedDefaultKey = keyParser(key); - const defaultInstanceId = buildInstanceId(parsedDefaultKey, trafficType); - - // Cache instances created per factory. - const clientInstances: Record = {}; - clientInstances[defaultInstanceId] = mainClientInstance; - - return function client(key?: SplitIO.SplitKey, trafficType?: string) { - if (key === undefined) { - log.debug(RETRIEVE_CLIENT_DEFAULT); - return mainClientInstance; - } - - // Validate the key value - const validKey = validateKey(log, key, LOG_PREFIX_CLIENT_INSTANTIATION); - if (validKey === false) { - throw new Error('Shared Client needs a valid key.'); - } - - let validTrafficType; - if (trafficType !== undefined) { - validTrafficType = validateTrafficType(log, trafficType, LOG_PREFIX_CLIENT_INSTANTIATION); - if (validTrafficType === false) { - throw new Error('Shared Client needs a valid traffic type or no traffic type at all.'); - } - } - const instanceId = buildInstanceId(validKey, validTrafficType); - - if (!clientInstances[instanceId]) { - const matchingKey = getMatching(validKey); - - const sharedSdkReadiness = sdkReadinessManager.shared(readyTimeout); - const sharedStorage = storage.shared && storage.shared(matchingKey, (err) => { - if (err) { - sharedSdkReadiness.readinessManager.timeout(); - return; - } - // Emit 
SDK_READY in consumer mode for shared clients - sharedSdkReadiness.readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); - }); - - // 3 possibilities: - // - Standalone mode: both syncManager and sharedSyncManager are defined - // - Consumer mode: both syncManager and sharedSyncManager are undefined - // - Consumer partial mode: syncManager is defined (only for submitters) but sharedSyncManager is undefined - // @ts-ignore - const sharedSyncManager = syncManager && sharedStorage && (syncManager as ISyncManagerCS).shared(matchingKey, sharedSdkReadiness.readinessManager, sharedStorage); - - // As shared clients reuse all the storage information, we don't need to check here if we - // will use offline or online mode. We should stick with the original decision. - clientInstances[instanceId] = clientCSDecorator( - log, - sdkClientFactory(objectAssign({}, params, { - sdkReadinessManager: sharedSdkReadiness, - storage: sharedStorage || storage, - syncManager: sharedSyncManager, - signalListener: undefined, // only the main client "destroy" method stops the signal listener - }), true) as SplitIO.IClient, - validKey, - validTrafficType - ); - - sharedSyncManager && sharedSyncManager.start(); - - log.info(NEW_SHARED_CLIENT); - } else { - log.debug(RETRIEVE_CLIENT_EXISTING); - } - - return clientInstances[instanceId]; - }; -} diff --git a/src/sdkFactory/__tests__/index.spec.ts b/src/sdkFactory/__tests__/index.spec.ts index c7b51b2a..e46296be 100644 --- a/src/sdkFactory/__tests__/index.spec.ts +++ b/src/sdkFactory/__tests__/index.spec.ts @@ -1,12 +1,12 @@ import { ISdkFactoryParams } from '../types'; import { sdkFactory } from '../index'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { EventEmitter } from '../../utils/MinEvents'; /** Mocks */ -const clientInstance = 'client'; +const clientInstance = { destroy: jest.fn() }; const 
managerInstance = 'manager'; const mockStorage = { splits: jest.fn(), @@ -26,11 +26,11 @@ jest.mock('../../trackers/telemetryTracker', () => { }; }); -// IAsyncSDK, minimal params +// IBrowserAsyncSDK, minimal params const paramsForAsyncSDK = { settings: fullSettings, storageFactory: jest.fn(() => mockStorage), - sdkClientMethodFactory: jest.fn(() => clientInstance), + sdkClientMethodFactory: jest.fn(({ clients }) => (key?: string) => { clients[key || ''] = clientInstance; return clientInstance; }), sdkManagerFactory: jest.fn(() => managerInstance), impressionsObserverFactory: jest.fn(), platform: { @@ -40,7 +40,7 @@ const paramsForAsyncSDK = { const SignalListenerInstanceMock = { start: jest.fn() }; -// ISDK, full params +// IBrowserSDK, full params const fullParamsForSyncSDK = { ...paramsForAsyncSDK, syncManagerFactory: jest.fn(), @@ -59,11 +59,12 @@ const fullParamsForSyncSDK = { /** End Mocks */ -function assertSdkApi(sdk: SplitIO.IAsyncSDK | SplitIO.ISDK | SplitIO.ICsSDK, params: any) { +function assertSdkApi(sdk: SplitIO.IAsyncSDK | SplitIO.ISDK | SplitIO.IBrowserAsyncSDK | SplitIO.IBrowserSDK, params: any) { expect(sdk.Logger).toBe(loggerApiMock); expect(sdk.settings).toBe(params.settings); expect(sdk.client).toBe(params.sdkClientMethodFactory.mock.results[0].value); expect(sdk.manager()).toBe(params.sdkManagerFactory.mock.results[0].value); + expect(sdk.destroy()).toBeDefined(); } function assertModulesCalled(params: any) { @@ -92,22 +93,18 @@ describe('sdkFactory', () => { afterEach(jest.clearAllMocks); - test('creates IAsyncSDK instance', () => { + test.each([paramsForAsyncSDK, fullParamsForSyncSDK])('creates SDK instance', async (params) => { - const sdk = sdkFactory(paramsForAsyncSDK as unknown as ISdkFactoryParams); + const sdk = sdkFactory(params as unknown as ISdkFactoryParams); // should return an object that conforms to SDK interface - assertSdkApi(sdk, paramsForAsyncSDK); + assertSdkApi(sdk, params); - assertModulesCalled(paramsForAsyncSDK); - }); 
- - test('creates ISDK instance', () => { - const sdk = sdkFactory(fullParamsForSyncSDK as unknown as ISdkFactoryParams); - - // should return an object that conforms to SDK interface - assertSdkApi(sdk, fullParamsForSyncSDK); + assertModulesCalled(params); - assertModulesCalled(fullParamsForSyncSDK); + // Factory destroy should call client destroy + expect(sdk.client()).toBe(clientInstance); + expect(await sdk.destroy()).toBeUndefined(); + expect(sdk.client().destroy).toBeCalledTimes(1); }); }); diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index cd15e9ef..572b6bec 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -3,7 +3,7 @@ import { sdkReadinessManagerFactory } from '../readiness/sdkReadinessManager'; import { impressionsTrackerFactory } from '../trackers/impressionsTracker'; import { eventTrackerFactory } from '../trackers/eventTracker'; import { telemetryTrackerFactory } from '../trackers/telemetryTracker'; -import { SplitIO } from '../types'; +import SplitIO from '../../types/splitio'; import { validateAndTrackApiKey } from '../utils/inputValidation/apiKey'; import { createLoggerAPI } from '../logger/sdkLogger'; import { NEW_FACTORY, RETRIEVE_MANAGER } from '../logger/constants'; @@ -18,21 +18,27 @@ import { NONE, OPTIMIZED } from '../utils/constants'; /** * Modular SDK factory */ -export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO.ISDK | SplitIO.IAsyncSDK { +export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IAsyncSDK | SplitIO.IBrowserSDK | SplitIO.IBrowserAsyncSDK { const { settings, platform, storageFactory, splitApiFactory, extraProps, syncManagerFactory, SignalListener, impressionsObserverFactory, integrationsManagerFactory, sdkManagerFactory, sdkClientMethodFactory, - filterAdapterFactory } = params; + filterAdapterFactory, lazyInit } = params; const { log, sync: { impressionsMode } } = settings; // @TODO handle non-recoverable errors, such as, global `fetch` 
not available, invalid SDK Key, etc. // On non-recoverable errors, we should mark the SDK as destroyed and not start synchronization. - // We will just log and allow for the SDK to end up throwing an SDK_TIMEOUT event for devs to handle. - validateAndTrackApiKey(log, settings.core.authorizationKey); + // initialization + let hasInit = false; + const initCallbacks: (() => void)[] = []; - const sdkReadinessManager = sdkReadinessManagerFactory(log, platform.EventEmitter, settings.startup.readyTimeout); + function whenInit(cb: () => void) { + if (hasInit) cb(); + else initCallbacks.push(cb); + } + + const sdkReadinessManager = sdkReadinessManagerFactory(platform.EventEmitter, settings); const readiness = sdkReadinessManager.readinessManager; const storage = storageFactory({ @@ -48,7 +54,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. }, }); // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` - + const clients: Record = {}; const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now); const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker }); @@ -67,13 +73,13 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
strategy = strategyDebugFactory(observer); } - const impressionsTracker = impressionsTrackerFactory(settings, storage.impressions, strategy, integrationsManager, storage.telemetry); - const eventTracker = eventTrackerFactory(settings, storage.events, integrationsManager, storage.telemetry); + const impressionsTracker = impressionsTrackerFactory(settings, storage.impressions, strategy, whenInit, integrationsManager, storage.telemetry); + const eventTracker = eventTrackerFactory(settings, storage.events, whenInit, integrationsManager, storage.telemetry); // splitApi is used by SyncManager and Browser signal listener const splitApi = splitApiFactory && splitApiFactory(settings, platform, telemetryTracker); - const ctx: ISdkFactoryContext = { splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform }; + const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform }; const syncManager = syncManagerFactory && syncManagerFactory(ctx as ISdkFactoryContextSync); ctx.syncManager = syncManager; @@ -85,8 +91,21 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. const clientMethod = sdkClientMethodFactory(ctx); const managerInstance = sdkManagerFactory(settings, storage.splits, sdkReadinessManager); - syncManager && syncManager.start(); - signalListener && signalListener.start(); + + function init() { + if (hasInit) return; + hasInit = true; + + // We will just log and allow for the SDK to end up throwing an SDK_TIMEOUT event for devs to handle. 
+ validateAndTrackApiKey(log, settings.core.authorizationKey); + readiness.init(); + uniqueKeysTracker && uniqueKeysTracker.start(); + syncManager && syncManager.start(); + signalListener && signalListener.start(); + + initCallbacks.forEach((cb) => cb()); + initCallbacks.length = 0; + } log.info(NEW_FACTORY); @@ -105,5 +124,9 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. Logger: createLoggerAPI(log), settings, - }, extraProps && extraProps(ctx)); + + destroy() { + return Promise.all(Object.keys(clients).map(key => clients[key].destroy())).then(() => { }); + } + }, extraProps && extraProps(ctx), lazyInit ? { init } : init()); } diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts index c785db33..443b1456 100644 --- a/src/sdkFactory/types.ts +++ b/src/sdkFactory/types.ts @@ -8,7 +8,8 @@ import { IStorageAsync, IStorageSync, IStorageFactoryParams } from '../storages/ import { ISyncManager } from '../sync/types'; import { IImpressionObserver } from '../trackers/impressionObserver/types'; import { IImpressionsTracker, IEventTracker, ITelemetryTracker, IFilterAdapter, IUniqueKeysTracker } from '../trackers/types'; -import { SplitIO, ISettings, IEventEmitter } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; /** * Environment related dependencies. @@ -29,7 +30,7 @@ export interface IPlatform { /** * EventEmitter constructor, like NodeJS.EventEmitter or a polyfill. */ - EventEmitter: new () => IEventEmitter, + EventEmitter: new () => SplitIO.IEventEmitter, /** * Function used to track latencies for telemetry. 
*/ @@ -49,6 +50,7 @@ export interface ISdkFactoryContext { signalListener?: ISignalListener splitApi?: ISplitApi syncManager?: ISyncManager, + clients: Record, } export interface ISdkFactoryContextSync extends ISdkFactoryContext { @@ -67,6 +69,8 @@ export interface ISdkFactoryContextAsync extends ISdkFactoryContext { * Object parameter with the modules required to create an SDK factory instance */ export interface ISdkFactoryParams { + // If true, the `sdkFactory` is pure (no side effects), and the SDK instance includes a `init` method to run initialization side effects + lazyInit?: boolean, // The settings must be already validated settings: ISettings, @@ -75,7 +79,7 @@ export interface ISdkFactoryParams { platform: IPlatform, // Storage factory. The result storage type implies the type of the SDK: - // sync SDK (`ISDK` or `ICsSDK`) with `IStorageSync`, and async SDK (`IAsyncSDK`) with `IStorageAsync` + // sync SDK (`IBrowserSDK` and `ISDK`) with `IStorageSync`, and async SDK (`IBrowserAsyncSDK` and `IAsyncSDK`) with `IStorageAsync` storageFactory: (params: IStorageFactoryParams) => IStorageSync | IStorageAsync, // Factory of Split Api (HTTP Client Service). @@ -90,9 +94,9 @@ export interface ISdkFactoryParams { // Sdk manager factory sdkManagerFactory: typeof sdkManagerFactory, - // Sdk client method factory (ISDK::client method). - // It Allows to distinguish SDK clients with the client-side API (`ICsSDK`) or server-side API (`ISDK` or `IAsyncSDK`). - sdkClientMethodFactory: (params: ISdkFactoryContext) => ({ (): SplitIO.ICsClient; (key: SplitIO.SplitKey, trafficType?: string | undefined): SplitIO.ICsClient; } | (() => SplitIO.IClient) | (() => SplitIO.IAsyncClient)) + // Sdk client method factory. + // It Allows to distinguish SDK clients with the client-side API (`IBrowserSDK` and `IBrowserAsyncSDK`) or server-side API (`ISDK` and `IAsyncSDK`). 
+ sdkClientMethodFactory: (params: ISdkFactoryContext) => ({ (): SplitIO.IBrowserClient; (key: SplitIO.SplitKey): SplitIO.IBrowserClient; } | (() => SplitIO.IClient) | (() => SplitIO.IAsyncClient)) // Impression observer factory. impressionsObserverFactory: () => IImpressionObserver diff --git a/src/sdkManager/__tests__/index.asyncCache.spec.ts b/src/sdkManager/__tests__/index.asyncCache.spec.ts index f812e959..a0277a75 100644 --- a/src/sdkManager/__tests__/index.asyncCache.spec.ts +++ b/src/sdkManager/__tests__/index.asyncCache.spec.ts @@ -9,7 +9,7 @@ import { ISdkReadinessManager } from '../../readiness/types'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { metadata } from '../../storages/__tests__/KeyBuilder.spec'; import { RedisAdapter } from '../../storages/inRedis/RedisAdapter'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; // @ts-expect-error const sdkReadinessManagerMock = { diff --git a/src/sdkManager/index.ts b/src/sdkManager/index.ts index 47aea3fa..1246f16f 100644 --- a/src/sdkManager/index.ts +++ b/src/sdkManager/index.ts @@ -5,7 +5,8 @@ import { validateSplit, validateSplitExistence, validateIfNotDestroyed, validate import { ISplitsCacheAsync, ISplitsCacheSync } from '../storages/types'; import { ISdkReadinessManager } from '../readiness/types'; import { ISplit } from '../dtos/types'; -import { ISettings, SplitIO } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { isConsumerMode } from '../utils/settingsValidation/mode'; import { SPLIT_FN_LABEL, SPLITS_FN_LABEL, NAMES_FN_LABEL } from '../utils/constants'; diff --git a/src/services/__tests__/splitApi.spec.ts b/src/services/__tests__/splitApi.spec.ts index 07d6dec4..d935c6de 100644 --- a/src/services/__tests__/splitApi.spec.ts +++ b/src/services/__tests__/splitApi.spec.ts @@ -30,10 +30,10 @@ describe('splitApi', () => { assertHeaders(settings, headers); 
expect(url).toBe('auth/v2/auth?s=1.1&users=key1&users=key2'); - splitApi.fetchMySegments('userKey'); + splitApi.fetchMemberships('userKey', false, 80); [url, { headers }] = fetchMock.mock.calls[1]; assertHeaders(settings, headers); - expect(url).toBe('sdk/mySegments/userKey'); + expect(url).toBe('sdk/memberships/userKey?till=80'); splitApi.fetchSegmentChanges(-1, 'segmentName', false, 90); [url, { headers }] = fetchMock.mock.calls[2]; diff --git a/src/services/decorateHeaders.ts b/src/services/decorateHeaders.ts index 5764ffab..4a95219f 100644 --- a/src/services/decorateHeaders.ts +++ b/src/services/decorateHeaders.ts @@ -1,8 +1,7 @@ import { objectAssign } from '../utils/lang/objectAssign'; -import { _Set } from '../utils/lang/sets'; import { ISettings } from '../types'; -const FORBIDDEN_HEADERS = new _Set([ +const FORBIDDEN_HEADERS = new Set([ 'splitsdkclientkey', 'splitsdkversion', 'splitsdkmachineip', diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index 9e2223ea..0b86b58d 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -4,7 +4,7 @@ import { splitHttpClientFactory } from './splitHttpClient'; import { ISplitApi } from './types'; import { objectAssign } from '../utils/lang/objectAssign'; import { ITelemetryTracker } from '../trackers/types'; -import { SPLITS, IMPRESSIONS, IMPRESSIONS_COUNT, EVENTS, TELEMETRY, TOKEN, SEGMENT, MY_SEGMENT } from '../utils/constants'; +import { SPLITS, IMPRESSIONS, IMPRESSIONS_COUNT, EVENTS, TELEMETRY, TOKEN, SEGMENT, MEMBERSHIPS } from '../utils/constants'; import { ERROR_TOO_MANY_SETS } from '../logger/constants'; const noCacheHeaderOptions = { headers: { 'Cache-Control': 'no-cache' } }; @@ -16,13 +16,13 @@ function userKeyToQueryParam(userKey: string) { /** * Factory of SplitApi objects, which group the collection of Split HTTP endpoints used by the SDK * - * @param settings validated settings object - * @param platform object containing environment-specific dependencies - * @param 
telemetryTracker telemetry tracker + * @param settings - validated settings object + * @param platform - object containing environment-specific dependencies + * @param telemetryTracker - telemetry tracker */ export function splitApiFactory( settings: ISettings, - platform: IPlatform, + platform: Pick, telemetryTracker: ITelemetryTracker ): ISplitApi { @@ -67,22 +67,22 @@ export function splitApiFactory( return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(SEGMENT)); }, - fetchMySegments(userMatchingKey: string, noCache?: boolean) { + fetchMemberships(userMatchingKey: string, noCache?: boolean, till?: number) { /** * URI encoding of user keys in order to: - * - avoid 400 responses (due to URI malformed). E.g.: '/api/mySegments/%' - * - avoid 404 responses. E.g.: '/api/mySegments/foo/bar' + * - avoid 400 responses (due to URI malformed). E.g.: '/api/memberships/%' + * - avoid 404 responses. E.g.: '/api/memberships/foo/bar' * - match user keys with special characters. E.g.: 'foo%bar', 'foo/bar' */ - const url = `${urls.sdk}/mySegments/${encodeURIComponent(userMatchingKey)}`; - return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MY_SEGMENT)); + const url = `${urls.sdk}/memberships/${encodeURIComponent(userMatchingKey)}${till ? '?till=' + till : ''}`; + return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(MEMBERSHIPS)); }, /** * Post events. * - * @param body Events bulk payload - * @param headers Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. + * @param body - Events bulk payload + * @param headers - Optional headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. 
*/ postEventsBulk(body: string, headers?: Record) { const url = `${urls.events}/events/bulk`; @@ -92,8 +92,8 @@ export function splitApiFactory( /** * Post impressions. * - * @param body Impressions bulk payload - * @param headers Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. + * @param body - Impressions bulk payload + * @param headers - Optional headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. */ postTestImpressionsBulk(body: string, headers?: Record) { const url = `${urls.events}/testImpressions/bulk`; @@ -106,8 +106,8 @@ export function splitApiFactory( /** * Post impressions counts. * - * @param body Impressions counts payload - * @param headers Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. + * @param body - Impressions counts payload + * @param headers - Optional headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. */ postTestImpressionsCount(body: string, headers?: Record) { const url = `${urls.events}/testImpressions/count`; @@ -117,8 +117,8 @@ export function splitApiFactory( /** * Post unique keys for client side. * - * @param body unique keys payload - * @param headers Optionals headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. + * @param body - unique keys payload + * @param headers - Optional headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. */ postUniqueKeysBulkCs(body: string, headers?: Record) { const url = `${urls.telemetry}/v1/keys/cs`; @@ -128,8 +128,8 @@ export function splitApiFactory( /** * Post unique keys for server side. * - * @param body unique keys payload - * @param headers Optionals headers to overwrite default ones. 
For example, it is used in producer mode to overwrite metadata headers. + * @param body - unique keys payload + * @param headers - Optional headers to overwrite default ones. For example, it is used in producer mode to overwrite metadata headers. */ postUniqueKeysBulkSs(body: string, headers?: Record) { const url = `${urls.telemetry}/v1/keys/ss`; diff --git a/src/services/splitHttpClient.ts b/src/services/splitHttpClient.ts index 0bb192e1..2fcebfc1 100644 --- a/src/services/splitHttpClient.ts +++ b/src/services/splitHttpClient.ts @@ -10,10 +10,10 @@ const messageNoFetch = 'Global fetch API is not available.'; /** * Factory of Split HTTP clients, which are HTTP clients with predefined headers for Split endpoints. * - * @param settings SDK settings, used to access authorizationKey, logger instance and metadata (SDK version, ip and hostname) to set additional headers - * @param platform object containing environment-specific dependencies + * @param settings - SDK settings, used to access authorizationKey, logger instance and metadata (SDK version, ip and hostname) to set additional headers + * @param platform - object containing environment-specific dependencies */ -export function splitHttpClientFactory(settings: ISettings, { getOptions, getFetch }: IPlatform): ISplitHttpClient { +export function splitHttpClientFactory(settings: ISettings, { getOptions, getFetch }: Pick): ISplitHttpClient { const { log, core: { authorizationKey }, version, runtime: { ip, hostname } } = settings; const options = getOptions && getOptions(settings); diff --git a/src/services/types.ts b/src/services/types.ts index 116ccec5..34708f90 100644 --- a/src/services/types.ts +++ b/src/services/types.ts @@ -39,7 +39,7 @@ export type IFetchSplitChanges = (since: number, noCache?: boolean, till?: numbe export type IFetchSegmentChanges = (since: number, segmentName: string, noCache?: boolean, till?: number) => 
Promise +export type IFetchMemberships = (userMatchingKey: string, noCache?: boolean, till?: number) => Promise export type IPostEventsBulk = (body: string, headers?: Record) => Promise @@ -61,7 +61,7 @@ export interface ISplitApi { fetchAuth: IFetchAuth fetchSplitChanges: IFetchSplitChanges fetchSegmentChanges: IFetchSegmentChanges - fetchMySegments: IFetchMySegments + fetchMemberships: IFetchMemberships postEventsBulk: IPostEventsBulk postUniqueKeysBulkCs: IPostUniqueKeysBulkCs postUniqueKeysBulkSs: IPostUniqueKeysBulkSs diff --git a/src/storages/AbstractMySegmentsCacheSync.ts b/src/storages/AbstractMySegmentsCacheSync.ts new file mode 100644 index 00000000..a03fc416 --- /dev/null +++ b/src/storages/AbstractMySegmentsCacheSync.ts @@ -0,0 +1,94 @@ +import { IMySegmentsResponse } from '../dtos/types'; +import { MySegmentsData } from '../sync/polling/types'; +import { ISegmentsCacheSync } from './types'; + +/** + * This class provides a skeletal implementation of the ISegmentsCacheSync interface + * to minimize the effort required to implement this interface. + */ +export abstract class AbstractMySegmentsCacheSync implements ISegmentsCacheSync { + + protected abstract addSegment(name: string): boolean + protected abstract removeSegment(name: string): boolean + protected abstract setChangeNumber(changeNumber?: number): boolean | void + + /** + * For server-side synchronizer: check if `key` is in `name` segment. + * For client-side synchronizer: check if `name` segment is in the cache. `key` is undefined. + */ + abstract isInSegment(name: string, key?: string): boolean + + /** + * clear the cache. + */ + clear() { + this.resetSegments({}); + } + + + // No-op. Not used in client-side. + registerSegments(): boolean { return false; } + update() { return false; } + + /** + * For server-side synchronizer: get the list of segments to fetch changes. + * Also used for the `seC` (segment count) telemetry stat. 
+ */ + abstract getRegisteredSegments(): string[] + + /** + * Only used for the `skC`(segment keys count) telemetry stat: 1 for client-side, and total count of keys in server-side. + */ + // @TODO for client-side it should be the number of clients, but it requires a refactor of MySegments caches to simplify the code. + abstract getKeysCount(): number + + abstract getChangeNumber(name: string): number + + /** + * For server-side synchronizer: the method is not used. + * For client-side synchronizer: it resets or updates the cache. + */ + resetSegments(segmentsData: MySegmentsData | IMySegmentsResponse): boolean { + this.setChangeNumber(segmentsData.cn); + + const { added, removed } = segmentsData as MySegmentsData; + + if (added && removed) { + let isDiff = false; + + added.forEach(segment => { + isDiff = this.addSegment(segment) || isDiff; + }); + + removed.forEach(segment => { + isDiff = this.removeSegment(segment) || isDiff; + }); + + return isDiff; + } + + const names = ((segmentsData as IMySegmentsResponse).k || []).map(s => s.n).sort(); + const storedSegmentKeys = this.getRegisteredSegments().sort(); + + // Extreme fast => everything is empty + if (!names.length && !storedSegmentKeys.length) return false; + + let index = 0; + + while (index < names.length && index < storedSegmentKeys.length && names[index] === storedSegmentKeys[index]) index++; + + // Quick path => no changes + if (index === names.length && index === storedSegmentKeys.length) return false; + + // Slowest path => add and/or remove segments + for (let removeIndex = index; removeIndex < storedSegmentKeys.length; removeIndex++) { + this.removeSegment(storedSegmentKeys[removeIndex]); + } + + for (let addIndex = index; addIndex < names.length; addIndex++) { + this.addSegment(names[addIndex]); + } + + return true; + } +} diff --git a/src/storages/AbstractSegmentsCacheSync.ts b/src/storages/AbstractSegmentsCacheSync.ts deleted file mode 100644 index a3780d48..00000000 --- 
a/src/storages/AbstractSegmentsCacheSync.ts +++ /dev/null @@ -1,68 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ -/* eslint-disable no-unused-vars */ -import { ISegmentsCacheSync } from './types'; - -/** - * This class provides a skeletal implementation of the ISegmentsCacheSync interface - * to minimize the effort required to implement this interface. - */ -export abstract class AbstractSegmentsCacheSync implements ISegmentsCacheSync { - /** - * For server-side synchronizer: add `segmentKeys` list of keys to `name` segment. - * For client-side synchronizer: add `name` segment to the cache. `segmentKeys` is undefined. - */ - abstract addToSegment(name: string, segmentKeys?: string[]): boolean - - /** - * For server-side synchronizer: remove `segmentKeys` list of keys from `name` segment. - * For client-side synchronizer: remove `name` segment from the cache. `segmentKeys` is undefined. - */ - abstract removeFromSegment(name: string, segmentKeys?: string[]): boolean - - /** - * For server-side synchronizer: check if `key` is in `name` segment. - * For client-side synchronizer: check if `name` segment is in the cache. `key` is undefined. - */ - abstract isInSegment(name: string, key?: string): boolean - - /** - * clear the cache. - */ - abstract clear(): void - - /** - * For server-side synchronizer: add the given list of segments to the cache, with an empty list of keys. The segments that already exist are not modified. - * For client-side synchronizer: the method is not used. - */ - registerSegments(names: string[]): boolean { return false; } - - /** - * For server-side synchronizer: get the list of segments to fetch changes. - * Also used for the `seC` (segment count) telemetry stat. - */ - abstract getRegisteredSegments(): string[] - - /** - * Only used for the `skC`(segment keys count) telemetry stat: 1 for client-side, and total count of keys in server-side. 
- * @TODO for client-side it should be the number of clients, but it requires a refactor of MySegments caches to simplify the code. - */ - abstract getKeysCount(): number - - /** - * For server-side synchronizer: set the change number of `name` segment. - * For client-side synchronizer: the method is not used. - */ - setChangeNumber(name: string, changeNumber: number): boolean { return true; } - - /** - * For server-side synchronizer: get the change number of `name` segment. - * For client-side synchronizer: the method is not used. - */ - getChangeNumber(name: string): number { return -1; } - - /** - * For server-side synchronizer: the method is not used. - * For client-side synchronizer: reset the cache with the given list of segments. - */ - resetSegments(names: string[]): boolean { return true; } -} diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts index 9e4e136c..dcf059ed 100644 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ b/src/storages/AbstractSplitsCacheAsync.ts @@ -1,7 +1,6 @@ import { ISplitsCacheAsync } from './types'; import { ISplit } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; -import { ISet } from '../utils/lang/sets'; /** * This class provides a skeletal implementation of the ISplitsCacheAsync interface @@ -18,7 +17,7 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { abstract getChangeNumber(): Promise abstract getAll(): Promise abstract getSplitNames(): Promise - abstract getNamesByFlagSets(flagSets: string[]): Promise[]> + abstract getNamesByFlagSets(flagSets: string[]): Promise[]> abstract trafficTypeExists(trafficType: string): Promise abstract clear(): Promise @@ -40,10 +39,7 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { * Kill `name` split and set `defaultTreatment` and `changeNumber`. * Used for SPLIT_KILL push notifications. 
 * - * @param {string} name - * @param {string} defaultTreatment - * @param {number} changeNumber - * @returns {Promise} a promise that is resolved once the split kill operation is performed. The fulfillment value is a boolean: `true` if the operation successed updating the split or `false` if no split is updated, + * @returns a promise that is resolved once the split kill operation is performed. The fulfillment value is a boolean: `true` if the operation succeeded updating the split or `false` if no split is updated, * for instance, if the `changeNumber` is old, or if the split is not found (e.g., `/splitchanges` hasn't been fetched yet), or if the storage fails to apply the update. * The promise will never be rejected. */ diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index d516837e..f82ebbd6 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -1,8 +1,7 @@ import { ISplitsCacheSync } from './types'; import { ISplit } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; -import { ISet } from '../utils/lang/sets'; -import { IN_SEGMENT } from '../utils/constants'; +import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; /** * This class provides a skeletal implementation of the ISplitsCacheSync interface @@ -32,7 +31,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { return splits; } - abstract setChangeNumber(changeNumber: number): boolean + abstract setChangeNumber(changeNumber: number): boolean | void abstract getChangeNumber(): number @@ -60,10 +59,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { * Kill `name` split and set `defaultTreatment` and `changeNumber`. * Used for SPLIT_KILL push notifications. 
 * - * @param {string} name - * @param {string} defaultTreatment - * @param {number} changeNumber - * @returns {boolean} `true` if the operation successed updating the split, or `false` if no split is updated, + * @returns `true` if the operation succeeded updating the split, or `false` if no split is updated, * for instance, if the `changeNumber` is old, or if the split is not found (e.g., `/splitchanges` hasn't been fetched yet), or if the storage fails to apply the update. */ killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean { @@ -80,7 +76,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { return false; } - abstract getNamesByFlagSets(flagSets: string[]): ISet[] + abstract getNamesByFlagSets(flagSets: string[]): Set[] } @@ -94,7 +90,8 @@ export function usesSegments(split: ISplit) { const matchers = conditions[i].matcherGroup.matchers; for (let j = 0; j < matchers.length; j++) { - if (matchers[j].matcherType === IN_SEGMENT) return true; + const matcher = matchers[j].matcherType; + if (matcher === IN_SEGMENT || matcher === IN_LARGE_SEGMENT) return true; } } diff --git a/src/storages/KeyBuilderCS.ts b/src/storages/KeyBuilderCS.ts index 65b59397..a59d7208 100644 --- a/src/storages/KeyBuilderCS.ts +++ b/src/storages/KeyBuilderCS.ts @@ -1,7 +1,13 @@ import { startsWith } from '../utils/lang'; import { KeyBuilder } from './KeyBuilder'; -export class KeyBuilderCS extends KeyBuilder { +export interface MySegmentsKeyBuilder { + buildSegmentNameKey(segmentName: string): string; + extractSegmentName(builtSegmentKeyName: string): string | undefined; + buildTillKey(): string; +} + +export class KeyBuilderCS extends KeyBuilder implements MySegmentsKeyBuilder { protected readonly regexSplitsCacheKey: RegExp; protected readonly matchingKey: string; @@ -26,18 +32,6 @@ export class KeyBuilderCS extends KeyBuilder { return builtSegmentKeyName.substr(prefix.length); } - // @BREAKING: The key used to start with the 
matching key instead of the prefix, this was changed on version 10.17.3 - buildOldSegmentNameKey(segmentName: string) { - return `${this.matchingKey}.${this.prefix}.segment.${segmentName}`; - } - // @BREAKING: The key used to start with the matching key instead of the prefix, this was changed on version 10.17.3 - extractOldSegmentKey(builtSegmentKeyName: string) { - const prefix = `${this.matchingKey}.${this.prefix}.segment.`; - - if (startsWith(builtSegmentKeyName, prefix)) - return builtSegmentKeyName.substr(prefix.length); - } - buildLastUpdatedKey() { return `${this.prefix}.splits.lastUpdated`; } @@ -45,4 +39,26 @@ export class KeyBuilderCS extends KeyBuilder { isSplitsCacheKey(key: string) { return this.regexSplitsCacheKey.test(key); } + + buildTillKey() { + return `${this.prefix}.${this.matchingKey}.segments.till`; + } +} + +export function myLargeSegmentsKeyBuilder(prefix: string, matchingKey: string): MySegmentsKeyBuilder { + return { + buildSegmentNameKey(segmentName: string) { + return `${prefix}.${matchingKey}.largeSegment.${segmentName}`; + }, + + extractSegmentName(builtSegmentKeyName: string) { + const p = `${prefix}.${matchingKey}.largeSegment.`; + + if (startsWith(builtSegmentKeyName, p)) return builtSegmentKeyName.substr(p.length); + }, + + buildTillKey() { + return `${prefix}.${matchingKey}.largeSegments.till`; + } + }; } diff --git a/src/storages/__tests__/KeyBuilder.spec.ts b/src/storages/__tests__/KeyBuilder.spec.ts index 890ca61a..e0494ec9 100644 --- a/src/storages/__tests__/KeyBuilder.spec.ts +++ b/src/storages/__tests__/KeyBuilder.spec.ts @@ -116,16 +116,16 @@ test('KEYS / latency and exception keys (telemetry)', () => { test('getStorageHash', () => { expect(getStorageHash({ core: { authorizationKey: '' }, - sync: { __splitFiltersValidation: { queryString: '&names=p1__split,p2__split' }, flagSpecVersion: '1.1' } - } as ISettings)).toBe('fdf7bd89'); + sync: { __splitFiltersValidation: { queryString: '&names=p1__split,p2__split' }, 
flagSpecVersion: '1.2' } + } as ISettings)).toBe('7ccd6b31'); expect(getStorageHash({ core: { authorizationKey: '' }, - sync: { __splitFiltersValidation: { queryString: '&names=p2__split,p3__split' }, flagSpecVersion: '1.1' } - } as ISettings)).toBe('ee4ec91'); + sync: { __splitFiltersValidation: { queryString: '&names=p2__split,p3__split' }, flagSpecVersion: '1.2' } + } as ISettings)).toBe('2a25d0e1'); expect(getStorageHash({ core: { authorizationKey: '' }, - sync: { __splitFiltersValidation: { queryString: null }, flagSpecVersion: '1.1' } - } as ISettings)).toBe('2a2c20bb'); + sync: { __splitFiltersValidation: { queryString: null }, flagSpecVersion: '1.2' } + } as ISettings)).toBe('db8943b4'); }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 1e351157..ce288868 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,26 +1,25 @@ -import { SplitIO } from '../types'; +import { PreloadedData } from '../types'; import { DEFAULT_CACHE_EXPIRATION_IN_MILLIS } from '../utils/constants/browser'; import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; /** * Factory of client-side storage loader * - * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader + * @param preloadedData - validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader * and extended with a `mySegmentsData` property. 
* @returns function to preload the storage */ -export function dataLoaderFactory(preloadedData: SplitIO.PreloadedData): DataLoader { +export function dataLoaderFactory(preloadedData: PreloadedData): DataLoader { /** * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) * - * @param storage object containing `splits` and `segments` cache (client-side variant) - * @param userId user key string of the provided MySegmentsCache - * - * @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. - * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. + * @param storage - object containing `splits` and `segments` cache (client-side variant) + * @param userId - user key string of the provided MySegmentsCache */ + // @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. + // @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. 
return function loadData(storage: { splits: ISplitsCacheSync, segments: ISegmentsCacheSync }, userId: string) { // Do not load data if current preloadedData is empty if (Object.keys(preloadedData).length === 0) return; @@ -50,6 +49,6 @@ export function dataLoaderFactory(preloadedData: SplitIO.PreloadedData): DataLoa return Array.isArray(userIds) && userIds.indexOf(userId) > -1; }); } - storage.segments.resetSegments(mySegmentsData); + storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); }; } diff --git a/src/storages/findLatencyIndex.ts b/src/storages/findLatencyIndex.ts index aac73e7a..3208bacc 100644 --- a/src/storages/findLatencyIndex.ts +++ b/src/storages/findLatencyIndex.ts @@ -7,7 +7,7 @@ const BASE = 1.5; /** * Calculates buckets from latency in milliseconds * - * @param latencyInMs + * @param latencyInMs - latency in milliseconds * @returns a bucket index from 0 to 22 inclusive */ export function findLatencyIndex(latencyInMs: number): number { diff --git a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts index 849a344b..e3b250b5 100644 --- a/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts @@ -1,36 +1,26 @@ import { ILogger } from '../../logger/types'; -import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync'; -import { KeyBuilderCS } from '../KeyBuilderCS'; +import { isNaNNumber } from '../../utils/lang'; +import { AbstractMySegmentsCacheSync } from '../AbstractMySegmentsCacheSync'; +import type { MySegmentsKeyBuilder } from '../KeyBuilderCS'; import { LOG_PREFIX, DEFINED } from './constants'; -export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { +export class MySegmentsCacheInLocal extends AbstractMySegmentsCacheSync { - private readonly keys: KeyBuilderCS; + private readonly keys: MySegmentsKeyBuilder; private readonly log: ILogger; - constructor(log: ILogger, keys: 
KeyBuilderCS) { + constructor(log: ILogger, keys: MySegmentsKeyBuilder) { super(); this.log = log; this.keys = keys; // There is not need to flush segments cache like splits cache, since resetSegments receives the up-to-date list of active segments } - /** - * Removes list of segments from localStorage - * @NOTE this method is not being used at the moment. - */ - clear() { - this.log.info(LOG_PREFIX + 'Flushing MySegments data from localStorage'); - - // We cannot simply call `localStorage.clear()` since that implies removing user items from the storage - // We could optimize next sentence, since it implies iterating over all localStorage items - this.resetSegments([]); - } - - addToSegment(name: string): boolean { + protected addSegment(name: string): boolean { const segmentKey = this.keys.buildSegmentNameKey(name); try { + if (localStorage.getItem(segmentKey) === DEFINED) return false; localStorage.setItem(segmentKey, DEFINED); return true; } catch (e) { @@ -39,10 +29,11 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { } } - removeFromSegment(name: string): boolean { + protected removeSegment(name: string): boolean { const segmentKey = this.keys.buildSegmentNameKey(name); try { + if (localStorage.getItem(segmentKey) !== DEFINED) return false; localStorage.removeItem(segmentKey); return true; } catch (e) { @@ -55,82 +46,41 @@ export class MySegmentsCacheInLocal extends AbstractSegmentsCacheSync { return localStorage.getItem(this.keys.buildSegmentNameKey(name)) === DEFINED; } - /** - * Reset (update) the cached list of segments with the given list, removing and adding segments if necessary. 
- * - * @param {string[]} segmentNames list of segment names - * @returns boolean indicating if the cache was updated (i.e., given list was different from the cached one) - */ - resetSegments(names: string[]): boolean { - let isDiff = false; - let index; - + getRegisteredSegments(): string[] { // Scan current values from localStorage - const storedSegmentNames = Object.keys(localStorage).reduce((accum, key) => { + return Object.keys(localStorage).reduce((accum, key) => { let segmentName = this.keys.extractSegmentName(key); - if (segmentName) { - accum.push(segmentName); - } else { - // @TODO @BREAKING: This is only to clean up "old" keys. Remove this whole else code block and reuse `getRegisteredSegments` method. - segmentName = this.keys.extractOldSegmentKey(key); - - if (segmentName) { // this was an old segment key, let's clean up. - const newSegmentKey = this.keys.buildSegmentNameKey(segmentName); - try { - // If the new format key is not there, create it. - if (!localStorage.getItem(newSegmentKey) && names.indexOf(segmentName) > -1) { - localStorage.setItem(newSegmentKey, DEFINED); - // we are migrating a segment, let's track it. - accum.push(segmentName); - } - localStorage.removeItem(key); // we migrated the current key, let's delete it. 
- } catch (e) { - this.log.error(e); - } - } - } + if (segmentName) accum.push(segmentName); return accum; }, [] as string[]); + } - // Extreme fast => everything is empty - if (names.length === 0 && storedSegmentNames.length === names.length) - return isDiff; + getKeysCount() { + return 1; + } - // Quick path - if (storedSegmentNames.length !== names.length) { - isDiff = true; + protected setChangeNumber(changeNumber?: number) { + try { + if (changeNumber) localStorage.setItem(this.keys.buildTillKey(), changeNumber + ''); + else localStorage.removeItem(this.keys.buildTillKey()); + } catch (e) { + this.log.error(e); + } + } - storedSegmentNames.forEach(name => this.removeFromSegment(name)); - names.forEach(name => this.addToSegment(name)); - } else { - // Slowest path => we need to find at least 1 difference because - for (index = 0; index < names.length && storedSegmentNames.indexOf(names[index]) !== -1; index++) { - // TODO: why empty statement? - } + getChangeNumber() { + const n = -1; + let value: string | number | null = localStorage.getItem(this.keys.buildTillKey()); - if (index < names.length) { - isDiff = true; + if (value !== null) { + value = parseInt(value, 10); - storedSegmentNames.forEach(name => this.removeFromSegment(name)); - names.forEach(name => this.addToSegment(name)); - } + return isNaNNumber(value) ? 
n : value; } - return isDiff; - } - - getRegisteredSegments(): string[] { - return Object.keys(localStorage).reduce((accum, key) => { - const segmentName = this.keys.extractSegmentName(key); - if (segmentName) accum.push(segmentName); - return accum; - }, []); - } - - getKeysCount() { - return 1; + return n; } } diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index ccd4859f..93eb6f32 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -4,9 +4,9 @@ import { isFiniteNumber, toNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilderCS } from '../KeyBuilderCS'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; -import { ISet, _Set, setToArray } from '../../utils/lang/sets'; import { ISettings } from '../../types'; import { getStorageHash } from '../KeyBuilder'; +import { setToArray } from '../../utils/lang/sets'; /** * ISplitsCacheSync implementation that stores split definitions in browser LocalStorage. 
@@ -20,11 +20,6 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { private hasSync?: boolean; private updateNewFilter?: boolean; - /** - * @param {KeyBuilderCS} keys - * @param {number | undefined} expirationTimestamp - * @param {ISplitFiltersValidation} splitFiltersValidation - */ constructor(settings: ISettings, keys: KeyBuilderCS, expirationTimestamp?: number) { super(); this.keys = keys; @@ -229,7 +224,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { /** * Clean Splits cache if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, * - * @param {number | undefined} expirationTimestamp if the value is not a number, data will not be cleaned + * @param expirationTimestamp - if the value is not a number, data will not be cleaned */ private _checkExpiration(expirationTimestamp?: number) { let value: string | number | null = localStorage.getItem(this.keys.buildLastUpdatedKey()); @@ -259,12 +254,12 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { // if the filter didn't change, nothing is done } - getNamesByFlagSets(flagSets: string[]): ISet[] { + getNamesByFlagSets(flagSets: string[]): Set[] { return flagSets.map(flagSet => { const flagSetKey = this.keys.buildFlagSetKey(flagSet); const flagSetFromLocalStorage = localStorage.getItem(flagSetKey); - return new _Set(flagSetFromLocalStorage ? JSON.parse(flagSetFromLocalStorage) : []); + return new Set(flagSetFromLocalStorage ? JSON.parse(flagSetFromLocalStorage) : []); }); } @@ -279,7 +274,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { const flagSetFromLocalStorage = localStorage.getItem(flagSetKey); - const flagSetCache = new _Set(flagSetFromLocalStorage ? JSON.parse(flagSetFromLocalStorage) : []); + const flagSetCache = new Set(flagSetFromLocalStorage ? 
JSON.parse(flagSetFromLocalStorage) : []); flagSetCache.add(featureFlag.name); localStorage.setItem(flagSetKey, JSON.stringify(setToArray(flagSetCache))); @@ -301,7 +296,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { if (!flagSetFromLocalStorage) return; - const flagSetCache = new _Set(JSON.parse(flagSetFromLocalStorage)); + const flagSetCache = new Set(JSON.parse(flagSetFromLocalStorage)); flagSetCache.delete(featureFlagName); if (flagSetCache.size === 0) { diff --git a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts index ae6dac82..bb38fe10 100644 --- a/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/MySegmentsCacheInLocal.spec.ts @@ -1,56 +1,40 @@ import { MySegmentsCacheInLocal } from '../MySegmentsCacheInLocal'; -import { KeyBuilderCS } from '../../KeyBuilderCS'; +import { KeyBuilderCS, myLargeSegmentsKeyBuilder } from '../../KeyBuilderCS'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; test('SEGMENT CACHE / in LocalStorage', () => { - const keys = new KeyBuilderCS('SPLITIO', 'user'); - const cache = new MySegmentsCacheInLocal(loggerMock, keys); - - cache.clear(); - - cache.addToSegment('mocked-segment'); - cache.addToSegment('mocked-segment-2'); - - expect(cache.isInSegment('mocked-segment')).toBe(true); - expect(cache.getRegisteredSegments()).toEqual(['mocked-segment', 'mocked-segment-2']); - expect(cache.getKeysCount()).toBe(1); - - cache.removeFromSegment('mocked-segment'); - - expect(cache.isInSegment('mocked-segment')).toBe(false); - expect(cache.getRegisteredSegments()).toEqual(['mocked-segment-2']); - expect(cache.getKeysCount()).toBe(1); - -}); - -// @BREAKING: REMOVE when removing this backwards compatibility. 
-test('SEGMENT CACHE / in LocalStorage migration for mysegments keys', () => { - - const keys = new KeyBuilderCS('LS_BC_test.SPLITIO', 'test_nico'); - const cache = new MySegmentsCacheInLocal(loggerMock, keys); - - const oldKey1 = 'test_nico.LS_BC_test.SPLITIO.segment.segment1'; - const oldKey2 = 'test_nico.LS_BC_test.SPLITIO.segment.segment2'; - const newKey1 = keys.buildSegmentNameKey('segment1'); - const newKey2 = keys.buildSegmentNameKey('segment2'); - - cache.clear(); // cleanup before starting. - - // Not adding a full suite for LS keys now, testing here - expect(oldKey1).toBe(keys.buildOldSegmentNameKey('segment1')); - expect('segment1').toBe(keys.extractOldSegmentKey(oldKey1)); - - // add two segments, one we don't want to send on reset, should only be cleared, other one will be migrated. - localStorage.setItem(oldKey1, '1'); - localStorage.setItem(oldKey2, '1'); - expect(localStorage.getItem(newKey1)).toBe(null); // control assertion - - cache.resetSegments(['segment1']); - - expect(localStorage.getItem(newKey1)).toBe('1'); // The segment key for segment1, as is part of the new list, should be migrated. - expect(localStorage.getItem(newKey2)).toBe(null); // The segment key for segment2 should not be migrated. - expect(localStorage.getItem(oldKey1)).toBe(null); // Old keys are removed. - expect(localStorage.getItem(oldKey2)).toBe(null); // Old keys are removed. 
- - cache.clear(); + const caches = [ + new MySegmentsCacheInLocal(loggerMock, new KeyBuilderCS('SPLITIO', 'user')), + new MySegmentsCacheInLocal(loggerMock, myLargeSegmentsKeyBuilder('SPLITIO', 'user')) + ]; + + caches.forEach(cache => { + cache.clear(); + + expect(cache.resetSegments({ k: [{ n: 'mocked-segment' }, { n: 'mocked-segment-2' }], cn: 123 })).toBe(true); + expect(cache.getChangeNumber()).toBe(123); + expect(cache.resetSegments({ k: [{ n: 'mocked-segment' }, { n: 'mocked-segment-2' }] })).toBe(false); + expect(cache.getChangeNumber()).toBe(-1); + + expect(cache.isInSegment('mocked-segment')).toBe(true); + expect(cache.getRegisteredSegments()).toEqual(['mocked-segment', 'mocked-segment-2']); + expect(cache.getKeysCount()).toBe(1); + }); + + caches.forEach(cache => { + // @ts-expect-error + cache.resetSegments({ + added: [], + removed: ['mocked-segment'] + }); + + expect(cache.isInSegment('mocked-segment')).toBe(false); + expect(cache.getRegisteredSegments()).toEqual(['mocked-segment-2']); + expect(cache.getKeysCount()).toBe(1); + }); + + expect(localStorage.getItem('SPLITIO.user.segment.mocked-segment-2')).toBe('1'); + expect(localStorage.getItem('SPLITIO.user.segment.mocked-segment')).toBe(null); + expect(localStorage.getItem('SPLITIO.user.largeSegment.mocked-segment-2')).toBe('1'); + expect(localStorage.getItem('SPLITIO.user.largeSegment.mocked-segment')).toBe(null); }); diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts index 732ca8b7..4d8ec076 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts @@ -2,7 +2,6 @@ import { SplitsCacheInLocal } from '../SplitsCacheInLocal'; import { KeyBuilderCS } from '../../KeyBuilderCS'; import { splitWithUserTT, splitWithAccountTT, splitWithAccountTTAndUsesSegments, something, somethingElse, featureFlagOne, featureFlagTwo, 
featureFlagThree, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; import { ISplit } from '../../../dtos/types'; -import { _Set } from '../../../utils/lang/sets'; import { fullSettings } from '../../../utils/settingsValidation/__tests__/settings.mocks'; @@ -174,7 +173,7 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests', () => { } } }, new KeyBuilderCS('SPLITIO', 'user')); - const emptySet = new _Set([]); + const emptySet = new Set([]); cache.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -183,21 +182,21 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests', () => { ]); cache.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(cache.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); expect(cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['1'] }); expect(cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_two'])]); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); expect(cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: 
['x'] }); - expect(cache.getNamesByFlagSets(['x'])).toEqual([new _Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new _Set(['ff_two']), new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); cache.removeSplit(featureFlagOne.name); @@ -214,7 +213,7 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests', () => { // if FlagSets are not defined, it should store all FlagSets in memory. test('SPLIT CACHE / LocalStorage / flag set cache tests without filters', () => { const cacheWithoutFilters = new SplitsCacheInLocal(fullSettings, new KeyBuilderCS('SPLITIO', 'user')); - const emptySet = new _Set([]); + const emptySet = new Set([]); cacheWithoutFilters.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -223,12 +222,12 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests without filters', () => ]); cacheWithoutFilters.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new _Set(['ff_two', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); expect(cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - 
expect(cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); // Validate that the feature flag cache is cleared when calling `clear` method cacheWithoutFilters.clear(); diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index 7dca8a24..93e735e8 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -3,7 +3,7 @@ import { ImpressionCountsCacheInMemory } from '../inMemory/ImpressionCountsCache import { EventsCacheInMemory } from '../inMemory/EventsCacheInMemory'; import { IStorageFactoryParams, IStorageSync, IStorageSyncFactory } from '../types'; import { validatePrefix } from '../KeyBuilder'; -import { KeyBuilderCS } from '../KeyBuilderCS'; +import { KeyBuilderCS, myLargeSegmentsKeyBuilder } from '../KeyBuilderCS'; import { isLocalStorageAvailable } from '../../utils/env/isLocalStorageAvailable'; import { SplitsCacheInLocal } from './SplitsCacheInLocal'; import { MySegmentsCacheInLocal } from './MySegmentsCacheInLocal'; @@ -38,15 +38,17 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn const { settings, settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; const matchingKey = getMatching(settings.core.key); - const keys = new KeyBuilderCS(prefix, matchingKey as string); + const keys = new KeyBuilderCS(prefix, matchingKey); const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; const splits = new SplitsCacheInLocal(settings, keys, expirationTimestamp); const segments = new MySegmentsCacheInLocal(log, keys); + const largeSegments = new MySegmentsCacheInLocal(log, 
myLargeSegmentsKeyBuilder(prefix, matchingKey));

   return {
     splits,
     segments,
+    largeSegments,
     impressions: new ImpressionsCacheInMemory(impressionsQueueSize),
     impressionCounts: impressionsMode !== DEBUG ? new ImpressionCountsCacheInMemory() : undefined,
     events: new EventsCacheInMemory(eventsQueueSize),
@@ -56,19 +58,20 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn
     destroy() {
       this.splits = new SplitsCacheInMemory(__splitFiltersValidation);
       this.segments = new MySegmentsCacheInMemory();
+      this.largeSegments = new MySegmentsCacheInMemory();
       this.impressions.clear();
       this.impressionCounts && this.impressionCounts.clear();
       this.events.clear();
       this.uniqueKeys?.clear();
     },

-    // When using shared instanciation with MEMORY we reuse everything but segments (they are customer per key).
+    // When using shared instantiation with MEMORY we reuse everything but segments (they are unique per key).
     shared(matchingKey: string) {
-      const childKeysBuilder = new KeyBuilderCS(prefix, matchingKey);

       return {
         splits: this.splits,
-        segments: new MySegmentsCacheInLocal(log, childKeysBuilder),
+        segments: new MySegmentsCacheInLocal(log, new KeyBuilderCS(prefix, matchingKey)),
+        largeSegments: new MySegmentsCacheInLocal(log, myLargeSegmentsKeyBuilder(prefix, matchingKey)),
         impressions: this.impressions,
         impressionCounts: this.impressionCounts,
         events: this.events,
@@ -77,6 +80,7 @@ export function InLocalStorage(options: InLocalStorageOptions = {}): IStorageSyn
         destroy() {
           this.splits = new SplitsCacheInMemory(__splitFiltersValidation);
           this.segments = new MySegmentsCacheInMemory();
+          this.largeSegments = new MySegmentsCacheInMemory();
         }
       };
     },
diff --git a/src/storages/inMemory/AttributesCacheInMemory.ts b/src/storages/inMemory/AttributesCacheInMemory.ts
index da7445a1..a8b084ea 100644
--- a/src/storages/inMemory/AttributesCacheInMemory.ts
+++ b/src/storages/inMemory/AttributesCacheInMemory.ts
@@ -1,4 +1,4 @@
-import { SplitIO } from '../../types';
+import SplitIO from '../../../types/splitio'; import { objectAssign } from '../../utils/lang/objectAssign'; export class AttributesCacheInMemory { @@ -9,9 +9,9 @@ export class AttributesCacheInMemory { /** * Create or update the value for the given attribute * - * @param {string} attributeName attribute name - * @param {Object} attributeValue attribute value - * @returns {boolean} the attribute was stored + * @param attributeName - attribute name + * @param attributeValue - attribute value + * @returns the attribute was stored */ setAttribute(attributeName: string, attributeValue: SplitIO.AttributeType) { this.attributesCache[attributeName] = attributeValue; @@ -21,8 +21,8 @@ export class AttributesCacheInMemory { /** * Retrieves the value of a given attribute * - * @param {string} attributeName attribute name - * @returns {Object?} stored attribute value + * @param attributeName - attribute name + * @returns stored attribute value */ getAttribute(attributeName: string) { return this.attributesCache[attributeName]; @@ -31,8 +31,8 @@ export class AttributesCacheInMemory { /** * Create or update all the given attributes * - * @param {[string, Object]} attributes attributes to create or update - * @returns {boolean} attributes were stored + * @param attributes - attributes to create or update + * @returns attributes were stored */ setAttributes(attributes: Record) { this.attributesCache = objectAssign(this.attributesCache, attributes); @@ -42,7 +42,7 @@ export class AttributesCacheInMemory { /** * Retrieve the full attributes map * - * @returns {Map} stored attributes + * @returns stored attributes */ getAll() { return this.attributesCache; @@ -51,8 +51,8 @@ export class AttributesCacheInMemory { /** * Removes a given attribute from the map * - * @param {string} attributeName attribute to remove - * @returns {boolean} attribute removed + * @param attributeName - attribute to remove + * @returns attribute removed */ removeAttribute(attributeName: string) { if 
(Object.keys(this.attributesCache).indexOf(attributeName) >= 0) { diff --git a/src/storages/inMemory/EventsCacheInMemory.ts b/src/storages/inMemory/EventsCacheInMemory.ts index 64525cdf..5c897868 100644 --- a/src/storages/inMemory/EventsCacheInMemory.ts +++ b/src/storages/inMemory/EventsCacheInMemory.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IEventsCacheSync } from '../types'; const MAX_QUEUE_BYTE_SIZE = 5 * 1024 * 1024; // 5M @@ -12,7 +12,7 @@ export class EventsCacheInMemory implements IEventsCacheSync { /** * - * @param eventsQueueSize number of queued events to call onFullQueueCb. + * @param eventsQueueSize - number of queued events to call onFullQueueCb. * Default value is 0, that means no maximum value, in case we want to avoid this being triggered. */ constructor(eventsQueueSize: number = 0) { diff --git a/src/storages/inMemory/ImpressionsCacheInMemory.ts b/src/storages/inMemory/ImpressionsCacheInMemory.ts index a3d46634..6995481a 100644 --- a/src/storages/inMemory/ImpressionsCacheInMemory.ts +++ b/src/storages/inMemory/ImpressionsCacheInMemory.ts @@ -1,15 +1,15 @@ import { IImpressionsCacheSync } from '../types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; export class ImpressionsCacheInMemory implements IImpressionsCacheSync { private onFullQueue?: () => void; private readonly maxQueue: number; - private queue: ImpressionDTO[]; + private queue: SplitIO.ImpressionDTO[]; /** * - * @param impressionsQueueSize number of queued impressions to call onFullQueueCb. + * @param impressionsQueueSize - number of queued impressions to call onFullQueueCb. * Default value is 0, that means no maximum value, in case we want to avoid this being triggered. 
*/ constructor(impressionsQueueSize: number = 0) { @@ -24,7 +24,7 @@ export class ImpressionsCacheInMemory implements IImpressionsCacheSync { /** * Store impressions in sequential order */ - track(data: ImpressionDTO[]) { + track(data: SplitIO.ImpressionDTO[]) { this.queue.push(...data); // Check if the cache queue is full and we need to flush it. @@ -43,7 +43,7 @@ export class ImpressionsCacheInMemory implements IImpressionsCacheSync { /** * Pop the collected data, used as payload for posting. */ - pop(toMerge?: ImpressionDTO[]) { + pop(toMerge?: SplitIO.ImpressionDTO[]) { const data = this.queue; this.clear(); return toMerge ? toMerge.concat(data) : data; diff --git a/src/storages/inMemory/InMemoryStorage.ts b/src/storages/inMemory/InMemoryStorage.ts index ccf3bd6a..e91ce8c6 100644 --- a/src/storages/inMemory/InMemoryStorage.ts +++ b/src/storages/inMemory/InMemoryStorage.ts @@ -11,7 +11,7 @@ import { UniqueKeysCacheInMemory } from './UniqueKeysCacheInMemory'; /** * InMemory storage factory for standalone server-side SplitFactory * - * @param params parameters required by EventsCacheSync + * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageFactory(params: IStorageFactoryParams): IStorageSync { const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 84d2351b..dd4262c2 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -11,17 +11,19 @@ import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; /** * InMemory storage factory for standalone client-side SplitFactory * - * @param params parameters required by EventsCacheSync + * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { 
const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; const splits = new SplitsCacheInMemory(__splitFiltersValidation); const segments = new MySegmentsCacheInMemory(); + const largeSegments = new MySegmentsCacheInMemory(); const storage = { splits, segments, + largeSegments, impressions: new ImpressionsCacheInMemory(impressionsQueueSize), impressionCounts: impressionsMode !== DEBUG ? new ImpressionCountsCacheInMemory() : undefined, events: new EventsCacheInMemory(eventsQueueSize), @@ -32,17 +34,19 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag destroy() { this.splits.clear(); this.segments.clear(); + this.largeSegments.clear(); this.impressions.clear(); this.impressionCounts && this.impressionCounts.clear(); this.events.clear(); this.uniqueKeys && this.uniqueKeys.clear(); }, - // When using shared instanciation with MEMORY we reuse everything but segments (they are unique per key) + // When using shared instantiation with MEMORY we reuse everything but segments (they are unique per key) shared() { return { splits: this.splits, segments: new MySegmentsCacheInMemory(), + largeSegments: new MySegmentsCacheInMemory(), impressions: this.impressions, impressionCounts: this.impressionCounts, events: this.events, @@ -52,13 +56,14 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag destroy() { this.splits = new SplitsCacheInMemory(__splitFiltersValidation); this.segments.clear(); + this.largeSegments.clear(); } }; }, }; // @TODO revisit storage logic in localhost mode - // No tracking data in localhost mode to avoid memory leaks + // No tracking in localhost mode to avoid memory leaks: https://github.com/splitio/javascript-commons/issues/181 if (params.settings.mode === LOCALHOST_MODE) { const noopTrack = () => true; storage.impressions.track = noopTrack; diff --git a/src/storages/inMemory/MySegmentsCacheInMemory.ts 
b/src/storages/inMemory/MySegmentsCacheInMemory.ts index 08a767a1..546a83c3 100644 --- a/src/storages/inMemory/MySegmentsCacheInMemory.ts +++ b/src/storages/inMemory/MySegmentsCacheInMemory.ts @@ -1,24 +1,25 @@ -import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync'; +import { AbstractMySegmentsCacheSync } from '../AbstractMySegmentsCacheSync'; /** * Default MySegmentsCacheInMemory implementation that stores MySegments in memory. * Supported by all JS runtimes. */ -export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { +export class MySegmentsCacheInMemory extends AbstractMySegmentsCacheSync { private segmentCache: Record = {}; + private cn?: number; - clear() { - this.segmentCache = {}; - } + protected addSegment(name: string): boolean { + if (this.segmentCache[name]) return false; - addToSegment(name: string): boolean { this.segmentCache[name] = true; return true; } - removeFromSegment(name: string): boolean { + protected removeSegment(name: string): boolean { + if (!this.segmentCache[name]) return false; + delete this.segmentCache[name]; return true; @@ -28,48 +29,13 @@ export class MySegmentsCacheInMemory extends AbstractSegmentsCacheSync { return this.segmentCache[name] === true; } - /** - * Reset (update) the cached list of segments with the given list, removing and adding segments if necessary. 
- * @NOTE based on the way we use segments in the browser, this way is the best option
- *
- * @param {string[]} names list of segment names
- * @returns boolean indicating if the cache was updated (i.e., given list was different from the cached one)
- */
-  resetSegments(names: string[]): boolean {
-    let isDiff = false;
-    let index;
-
-    const storedSegmentKeys = Object.keys(this.segmentCache);
-
-    // Extreme fast => everything is empty
-    if (names.length === 0 && storedSegmentKeys.length === names.length)
-      return isDiff;
-    // Quick path
-    if (storedSegmentKeys.length !== names.length) {
-      isDiff = true;
-
-      this.segmentCache = {};
-      names.forEach(s => {
-        this.addToSegment(s);
-      });
-    } else {
-      // Slowest path => we need to find at least 1 difference because
-      for (index = 0; index < names.length && this.isInSegment(names[index]); index++) {
-        // TODO: why empty statement?
-      }
-
-      if (index < names.length) {
-        isDiff = true;
-
-        this.segmentCache = {};
-        names.forEach(s => {
-          this.addToSegment(s);
-        });
-      }
-    }
+  protected setChangeNumber(changeNumber?: number) {
+    this.cn = changeNumber;
+  }

-    return isDiff;
+  getChangeNumber() {
+    return this.cn || -1;
   }

   getRegisteredSegments() {
diff --git a/src/storages/inMemory/SegmentsCacheInMemory.ts b/src/storages/inMemory/SegmentsCacheInMemory.ts
index a7d52b7c..87ca71ce 100644
--- a/src/storages/inMemory/SegmentsCacheInMemory.ts
+++ b/src/storages/inMemory/SegmentsCacheInMemory.ts
@@ -1,36 +1,24 @@
-import { AbstractSegmentsCacheSync } from '../AbstractSegmentsCacheSync';
-import { ISet, _Set } from '../../utils/lang/sets';
 import { isIntegerNumber } from '../../utils/lang';
+import { ISegmentsCacheSync } from '../types';

 /**
- * Default ISplitsCacheSync implementation that stores split definitions in memory.
- * Supported by all JS runtimes.
+ * Default ISegmentsCacheSync implementation for server-side that stores segment definitions in memory.
*/ -export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { +export class SegmentsCacheInMemory implements ISegmentsCacheSync { - private segmentCache: Record> = {}; + private segmentCache: Record> = {}; private segmentChangeNumber: Record = {}; - addToSegment(name: string, segmentKeys: string[]): boolean { - const values = this.segmentCache[name]; - const keySet = values ? values : new _Set(); + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number) { + const keySet = this.segmentCache[name] || new Set(); - segmentKeys.forEach(k => keySet.add(k)); + addedKeys.forEach(k => keySet.add(k)); + removedKeys.forEach(k => keySet.delete(k)); this.segmentCache[name] = keySet; + this.segmentChangeNumber[name] = changeNumber; - return true; - } - - removeFromSegment(name: string, segmentKeys: string[]): boolean { - const values = this.segmentCache[name]; - const keySet = values ? values : new _Set(); - - segmentKeys.forEach(k => keySet.delete(k)); - - this.segmentCache[name] = keySet; - - return true; + return addedKeys.length > 0 || removedKeys.length > 0; } isInSegment(name: string, key: string): boolean { @@ -50,7 +38,7 @@ export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { private _registerSegment(name: string) { if (!this.segmentCache[name]) { - this.segmentCache[name] = new _Set(); + this.segmentCache[name] = new Set(); } return true; @@ -74,16 +62,13 @@ export class SegmentsCacheInMemory extends AbstractSegmentsCacheSync { }, 0); } - setChangeNumber(name: string, changeNumber: number) { - this.segmentChangeNumber[name] = changeNumber; - - return true; - } - getChangeNumber(name: string) { const value = this.segmentChangeNumber[name]; return isIntegerNumber(value) ? value : -1; } + // No-op. 
Not used in server-side + resetSegments() { return false; } + } diff --git a/src/storages/inMemory/SplitsCacheInMemory.ts b/src/storages/inMemory/SplitsCacheInMemory.ts index cf570eea..688b6e24 100644 --- a/src/storages/inMemory/SplitsCacheInMemory.ts +++ b/src/storages/inMemory/SplitsCacheInMemory.ts @@ -1,11 +1,9 @@ import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; import { AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; import { isFiniteNumber } from '../../utils/lang'; -import { ISet, _Set } from '../../utils/lang/sets'; /** * Default ISplitsCacheSync implementation that stores split definitions in memory. - * Supported by all JS runtimes. */ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { @@ -13,8 +11,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { private splitsCache: Record = {}; private ttCache: Record = {}; private changeNumber: number = -1; - private splitsWithSegmentsCount: number = 0; - private flagSetsCache: Record> = {}; + private segmentsCount: number = 0; + private flagSetsCache: Record> = {}; constructor(splitFiltersValidation?: ISplitFiltersValidation) { super(); @@ -25,7 +23,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.splitsCache = {}; this.ttCache = {}; this.changeNumber = -1; - this.splitsWithSegmentsCount = 0; + this.segmentsCount = 0; } addSplit(name: string, split: ISplit): boolean { @@ -38,9 +36,8 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.removeFromFlagSets(previousSplit.name, previousSplit.sets); - if (usesSegments(previousSplit)) { // Substract from segments count for the previous version of this Split. 
- this.splitsWithSegmentsCount--; - } + // Subtract from segments count for the previous version of this Split + if (usesSegments(previousSplit)) this.segmentsCount--; } if (split) { @@ -52,7 +49,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.addToFlagSets(split); // Add to segments count for the new version of the Split - if (usesSegments(split)) this.splitsWithSegmentsCount++; + if (usesSegments(split)) this.segmentsCount++; return true; } else { @@ -72,7 +69,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.removeFromFlagSets(split.name, split.sets); // Update the segments count. - if (usesSegments(split)) this.splitsWithSegmentsCount--; + if (usesSegments(split)) this.segmentsCount--; return true; } else { @@ -102,11 +99,11 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { } usesSegments(): boolean { - return this.getChangeNumber() === -1 || this.splitsWithSegmentsCount > 0; + return this.getChangeNumber() === -1 || this.segmentsCount > 0; } - getNamesByFlagSets(flagSets: string[]): ISet[] { - return flagSets.map(flagSet => this.flagSetsCache[flagSet] || new _Set()); + getNamesByFlagSets(flagSets: string[]): Set[] { + return flagSets.map(flagSet => this.flagSetsCache[flagSet] || new Set()); } private addToFlagSets(featureFlag: ISplit) { @@ -115,7 +112,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { if (this.flagSetsFilter.length > 0 && !this.flagSetsFilter.some(filterFlagSet => filterFlagSet === featureFlagSet)) return; - if (!this.flagSetsCache[featureFlagSet]) this.flagSetsCache[featureFlagSet] = new _Set([]); + if (!this.flagSetsCache[featureFlagSet]) this.flagSetsCache[featureFlagSet] = new Set([]); this.flagSetsCache[featureFlagSet].add(featureFlag.name); }); diff --git a/src/storages/inMemory/TelemetryCacheInMemory.ts b/src/storages/inMemory/TelemetryCacheInMemory.ts index 26fb2b17..7e7e3f98 100644 --- a/src/storages/inMemory/TelemetryCacheInMemory.ts +++ 
b/src/storages/inMemory/TelemetryCacheInMemory.ts @@ -1,4 +1,4 @@ -import { ImpressionDataType, EventDataType, LastSync, HttpErrors, HttpLatencies, StreamingEvent, Method, OperationType, MethodExceptions, MethodLatencies, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../../sync/submitters/types'; +import { ImpressionDataType, EventDataType, LastSync, HttpErrors, HttpLatencies, StreamingEvent, Method, OperationType, MethodExceptions, MethodLatencies, TelemetryUsageStatsPayload, UpdatesFromSSEEnum, UpdatesFromSSE } from '../../sync/submitters/types'; import { DEDUPED, DROPPED, LOCALHOST_MODE, QUEUED } from '../../utils/constants'; import { findLatencyIndex } from '../findLatencyIndex'; import { ISegmentsCacheSync, ISplitsCacheSync, IStorageFactoryParams, ITelemetryCacheSync } from '../types'; @@ -25,7 +25,7 @@ export function shouldRecordTelemetry({ settings }: IStorageFactoryParams) { export class TelemetryCacheInMemory implements ITelemetryCacheSync { - constructor(private splits?: ISplitsCacheSync, private segments?: ISegmentsCacheSync) { } + constructor(private splits?: ISplitsCacheSync, private segments?: ISegmentsCacheSync, private largeSegments?: ISegmentsCacheSync) { } // isEmpty flag private e = true; @@ -51,6 +51,8 @@ export class TelemetryCacheInMemory implements ITelemetryCacheSync { spC: this.splits && this.splits.getSplitNames().length, seC: this.segments && this.segments.getRegisteredSegments().length, skC: this.segments && this.segments.getKeysCount(), + lsC: this.largeSegments && this.largeSegments.getRegisteredSegments().length, + lskC: this.largeSegments && this.largeSegments.getKeysCount(), sL: this.getSessionLength(), eQ: this.getEventStats(QUEUED), eD: this.getEventStats(DROPPED), @@ -245,22 +247,16 @@ export class TelemetryCacheInMemory implements ITelemetryCacheSync { this.e = false; } - private updatesFromSSE = { - sp: 0, - ms: 0 - }; + private updatesFromSSE: UpdatesFromSSE = {}; popUpdatesFromSSE() { const result = 
this.updatesFromSSE; - this.updatesFromSSE = { - sp: 0, - ms: 0, - }; + this.updatesFromSSE = {}; return result; } recordUpdatesFromSSE(type: UpdatesFromSSEEnum) { - this.updatesFromSSE[type]++; + this.updatesFromSSE[type] = (this.updatesFromSSE[type] || 0) + 1; this.e = false; } diff --git a/src/storages/inMemory/UniqueKeysCacheInMemory.ts b/src/storages/inMemory/UniqueKeysCacheInMemory.ts index e176aa0a..9c45721c 100644 --- a/src/storages/inMemory/UniqueKeysCacheInMemory.ts +++ b/src/storages/inMemory/UniqueKeysCacheInMemory.ts @@ -1,12 +1,12 @@ import { IUniqueKeysCacheBase } from '../types'; -import { ISet, setToArray, _Set } from '../../utils/lang/sets'; import { UniqueKeysPayloadSs } from '../../sync/submitters/types'; import { DEFAULT_CACHE_SIZE } from '../inRedis/constants'; +import { setToArray } from '../../utils/lang/sets'; /** * Converts `uniqueKeys` data from cache into request payload for SS. */ -export function fromUniqueKeysCollector(uniqueKeys: { [featureName: string]: ISet }): UniqueKeysPayloadSs { +export function fromUniqueKeysCollector(uniqueKeys: { [featureName: string]: Set }): UniqueKeysPayloadSs { const payload = []; const featureNames = Object.keys(uniqueKeys); for (let i = 0; i < featureNames.length; i++) { @@ -27,7 +27,7 @@ export class UniqueKeysCacheInMemory implements IUniqueKeysCacheBase { protected onFullQueue?: () => void; private readonly maxStorage: number; private uniqueTrackerSize = 0; - protected uniqueKeysTracker: { [featureName: string]: ISet } = {}; + protected uniqueKeysTracker: { [featureName: string]: Set } = {}; constructor(uniqueKeysQueueSize = DEFAULT_CACHE_SIZE) { this.maxStorage = uniqueKeysQueueSize; @@ -41,7 +41,7 @@ export class UniqueKeysCacheInMemory implements IUniqueKeysCacheBase { * Store unique keys per feature. 
*/ track(userKey: string, featureName: string) { - if (!this.uniqueKeysTracker[featureName]) this.uniqueKeysTracker[featureName] = new _Set(); + if (!this.uniqueKeysTracker[featureName]) this.uniqueKeysTracker[featureName] = new Set(); const tracker = this.uniqueKeysTracker[featureName]; if (!tracker.has(userKey)) { tracker.add(userKey); diff --git a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts index 66f54d0c..87133e3d 100644 --- a/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts +++ b/src/storages/inMemory/UniqueKeysCacheInMemoryCS.ts @@ -1,20 +1,15 @@ import { IUniqueKeysCacheBase } from '../types'; -import { ISet, setToArray, _Set } from '../../utils/lang/sets'; import { UniqueKeysPayloadCs } from '../../sync/submitters/types'; import { DEFAULT_CACHE_SIZE } from '../inRedis/constants'; +import { setToArray } from '../../utils/lang/sets'; export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { private onFullQueue?: () => void; private readonly maxStorage: number; private uniqueTrackerSize = 0; - private uniqueKeysTracker: { [userKey: string]: ISet } = {}; + private uniqueKeysTracker: { [userKey: string]: Set } = {}; - /** - * - * @param impressionsQueueSize number of queued impressions to call onFullQueueCb. - * Default value is 0, that means no maximum value, in case we want to avoid this being triggered. 
- */ constructor(uniqueKeysQueueSize = DEFAULT_CACHE_SIZE) { this.maxStorage = uniqueKeysQueueSize; } @@ -28,7 +23,7 @@ export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { */ track(userKey: string, featureName: string) { - if (!this.uniqueKeysTracker[userKey]) this.uniqueKeysTracker[userKey] = new _Set(); + if (!this.uniqueKeysTracker[userKey]) this.uniqueKeysTracker[userKey] = new Set(); const tracker = this.uniqueKeysTracker[userKey]; if (!tracker.has(featureName)) { tracker.add(featureName); @@ -66,7 +61,7 @@ export class UniqueKeysCacheInMemoryCS implements IUniqueKeysCacheBase { /** * Converts `uniqueKeys` data from cache into request payload. */ - private fromUniqueKeysCollector(uniqueKeys: { [userKey: string]: ISet }): UniqueKeysPayloadCs { + private fromUniqueKeysCollector(uniqueKeys: { [userKey: string]: Set }): UniqueKeysPayloadCs { const payload = []; const userKeys = Object.keys(uniqueKeys); for (let k = 0; k < userKeys.length; k++) { diff --git a/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts index 7236e950..b936d17c 100644 --- a/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/MySegmentsCacheInMemory.spec.ts @@ -3,17 +3,22 @@ import { MySegmentsCacheInMemory } from '../MySegmentsCacheInMemory'; test('MY SEGMENTS CACHE / in memory', () => { const cache = new MySegmentsCacheInMemory(); - cache.addToSegment('mocked-segment'); - cache.addToSegment('mocked-segment-2'); + expect(cache.resetSegments({ k: [{ n: 'mocked-segment' }, { n: 'mocked-segment-2' }], cn: 123 })).toBe(true); + expect(cache.getChangeNumber()).toBe(123); + expect(cache.resetSegments({ k: [{ n: 'mocked-segment' }, { n: 'mocked-segment-2' }] })).toBe(false); + expect(cache.getChangeNumber()).toBe(-1); expect(cache.isInSegment('mocked-segment')).toBe(true); expect(cache.getRegisteredSegments()).toEqual(['mocked-segment', 
'mocked-segment-2']); expect(cache.getKeysCount()).toBe(1); - cache.removeFromSegment('mocked-segment'); + expect(cache.resetSegments({ k: [{ n: 'mocked-segment-2' }], cn: 150})).toBe(true); expect(cache.isInSegment('mocked-segment')).toBe(false); expect(cache.getRegisteredSegments()).toEqual(['mocked-segment-2']); expect(cache.getKeysCount()).toBe(1); + cache.clear(); + expect(cache.getRegisteredSegments()).toEqual([]); + expect(cache.getChangeNumber()).toBe(-1); }); diff --git a/src/storages/inMemory/__tests__/SegmentsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/SegmentsCacheInMemory.spec.ts index e6713376..5ee2683c 100644 --- a/src/storages/inMemory/__tests__/SegmentsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/SegmentsCacheInMemory.spec.ts @@ -2,24 +2,18 @@ import { SegmentsCacheInMemory } from '../SegmentsCacheInMemory'; describe('SEGMENTS CACHE IN MEMORY', () => { - test('isInSegment, set/getChangeNumber, add/removeFromSegment, getKeysCount', () => { + test('isInSegment, getChangeNumber, update, getKeysCount', () => { const cache = new SegmentsCacheInMemory(); - cache.addToSegment('mocked-segment', [ - 'a', 'b', 'c' - ]); - - cache.setChangeNumber('mocked-segment', 1); - - cache.removeFromSegment('mocked-segment', ['d']); + cache.update('mocked-segment', [ 'a', 'b', 'c'], [], 1); + cache.update('mocked-segment', [], ['d'], 1); expect(cache.getChangeNumber('mocked-segment') === 1).toBe(true); - cache.addToSegment('mocked-segment', ['d', 'e']); + cache.update('mocked-segment', [ 'd', 'e'], [], 2); + cache.update('mocked-segment', [], ['a', 'c'], 2); - cache.removeFromSegment('mocked-segment', ['a', 'c']); - - expect(cache.getChangeNumber('mocked-segment') === 1).toBe(true); + expect(cache.getChangeNumber('mocked-segment') === 2).toBe(true); expect(cache.isInSegment('mocked-segment', 'a')).toBe(false); expect(cache.isInSegment('mocked-segment', 'b')).toBe(true); // b @@ -29,7 +23,7 @@ describe('SEGMENTS CACHE IN MEMORY', () => { // 
getKeysCount expect(cache.getKeysCount()).toBe(3); - cache.addToSegment('mocked-segment-2', ['a', 'b', 'c', 'd', 'e']); + cache.update('mocked-segment-2', ['a', 'b', 'c', 'd', 'e'], [], 2); expect(cache.getKeysCount()).toBe(8); cache.clear(); expect(cache.getKeysCount()).toBe(0); diff --git a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts index 14fa62fd..62812586 100644 --- a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts @@ -1,7 +1,6 @@ import { SplitsCacheInMemory } from '../SplitsCacheInMemory'; import { ISplit } from '../../../dtos/types'; import { splitWithUserTT, splitWithAccountTT, something, somethingElse, featureFlagWithEmptyFS, featureFlagWithoutFS, featureFlagOne, featureFlagTwo, featureFlagThree } from '../../__tests__/testUtils'; -import { _Set } from '../../../utils/lang/sets'; test('SPLITS CACHE / In Memory', () => { const cache = new SplitsCacheInMemory(); @@ -118,7 +117,7 @@ test('SPLITS CACHE / In Memory / killLocally', () => { test('SPLITS CACHE / In Memory / flag set cache tests', () => { // @ts-ignore const cache = new SplitsCacheInMemory({ groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); - const emptySet = new _Set([]); + const emptySet = new Set([]); cache.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -127,21 +126,21 @@ test('SPLITS CACHE / In Memory / flag set cache tests', () => { ]); cache.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(cache.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + 
expect(cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); expect(cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['1'] }); expect(cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_two'])]); + expect(cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); expect(cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['x'] }); - expect(cache.getNamesByFlagSets(['x'])).toEqual([new _Set(['ff_one'])]); - expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new _Set(['ff_two']), new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); + expect(cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); cache.removeSplit(featureFlagOne.name); @@ -158,7 +157,7 @@ test('SPLITS CACHE / In Memory / flag set cache tests', () => { // if FlagSets are not defined, it should store all FlagSets in memory. 
test('SPLIT CACHE / LocalStorage / flag set cache tests without filters', () => { const cacheWithoutFilters = new SplitsCacheInMemory(); - const emptySet = new _Set([]); + const emptySet = new Set([]); cacheWithoutFilters.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -167,10 +166,10 @@ test('SPLIT CACHE / LocalStorage / flag set cache tests without filters', () => ]); cacheWithoutFilters.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); - expect(cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new _Set(['ff_two', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); expect(cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); }); diff --git a/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts index 8bed17b7..c6d4340b 100644 --- a/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/TelemetryCacheInMemory.spec.ts @@ -1,4 +1,4 @@ -import { QUEUED, DROPPED, 
DEDUPED, EVENTS, IMPRESSIONS, IMPRESSIONS_COUNT, MY_SEGMENT, SEGMENT, SPLITS, TELEMETRY, TOKEN, TRACK, TREATMENT, TREATMENTS, TREATMENTS_WITH_CONFIG, TREATMENT_WITH_CONFIG } from '../../../utils/constants'; +import { QUEUED, DROPPED, DEDUPED, EVENTS, IMPRESSIONS, IMPRESSIONS_COUNT, MEMBERSHIPS, SEGMENT, SPLITS, TELEMETRY, TOKEN, TRACK, TREATMENT, TREATMENTS, TREATMENTS_WITH_CONFIG, TREATMENT_WITH_CONFIG } from '../../../utils/constants'; import { EventDataType, ImpressionDataType, Method, OperationType, StreamingEvent } from '../../../sync/submitters/types'; import { TelemetryCacheInMemory } from '../TelemetryCacheInMemory'; @@ -14,7 +14,7 @@ const operationTypes: OperationType[] = [ TELEMETRY, TOKEN, SEGMENT, - MY_SEGMENT + MEMBERSHIPS ]; const methods: Method[] = [ @@ -88,7 +88,7 @@ describe('TELEMETRY CACHE', () => { expect(cache.getLastSynchronization()).toEqual(expectedLastSync); // Overwrite a single operation - cache.recordSuccessfulSync(MY_SEGMENT, 100); + cache.recordSuccessfulSync(MEMBERSHIPS, 100); expect(cache.getLastSynchronization()).toEqual({ ...expectedLastSync, 'ms': 100 }); }); @@ -106,7 +106,7 @@ describe('TELEMETRY CACHE', () => { expect(cache.popHttpErrors()).toEqual({}); // Set a single http error - cache.recordHttpError(MY_SEGMENT, 400); + cache.recordHttpError(MEMBERSHIPS, 400); expect(cache.popHttpErrors()).toEqual({ 'ms': { 400: 1 } }); }); @@ -211,7 +211,7 @@ describe('TELEMETRY CACHE', () => { test('"isEmpty" and "pop" methods', () => { const cache = new TelemetryCacheInMemory(); const expectedEmptyPayload = { - lS: {}, mL: {}, mE: {}, hE: {}, hL: {}, tR: 0, aR: 0, iQ: 0, iDe: 0, iDr: 0, spC: undefined, seC: undefined, skC: undefined, eQ: 0, eD: 0, sE: [], t: [], ufs:{ sp: 0, ms: 0 } + lS: {}, mL: {}, mE: {}, hE: {}, hL: {}, tR: 0, aR: 0, iQ: 0, iDe: 0, iDr: 0, spC: undefined, seC: undefined, skC: undefined, eQ: 0, eD: 0, sE: [], t: [], ufs: {} }; // Initially, the cache is empty @@ -228,20 +228,20 @@ describe('TELEMETRY CACHE', () => { 
}); test('updates from SSE', () => { - expect(cache.popUpdatesFromSSE()).toEqual({sp: 0, ms: 0}); + expect(cache.popUpdatesFromSSE()).toEqual({}); cache.recordUpdatesFromSSE(SPLITS); cache.recordUpdatesFromSSE(SPLITS); cache.recordUpdatesFromSSE(SPLITS); - cache.recordUpdatesFromSSE(MY_SEGMENT); - cache.recordUpdatesFromSSE(MY_SEGMENT); - expect(cache.popUpdatesFromSSE()).toEqual({sp: 3, ms: 2}); - expect(cache.popUpdatesFromSSE()).toEqual({sp: 0, ms: 0}); + cache.recordUpdatesFromSSE(MEMBERSHIPS); + cache.recordUpdatesFromSSE(MEMBERSHIPS); + expect(cache.popUpdatesFromSSE()).toEqual({ sp: 3, ms: 2 }); + expect(cache.popUpdatesFromSSE()).toEqual({}); cache.recordUpdatesFromSSE(SPLITS); - cache.recordUpdatesFromSSE(MY_SEGMENT); + cache.recordUpdatesFromSSE(MEMBERSHIPS); cache.recordUpdatesFromSSE(SPLITS); - cache.recordUpdatesFromSSE(MY_SEGMENT); - expect(cache.popUpdatesFromSSE()).toEqual({sp: 2, ms: 2}); - expect(cache.popUpdatesFromSSE()).toEqual({sp: 0, ms: 0}); + cache.recordUpdatesFromSSE(MEMBERSHIPS); + expect(cache.popUpdatesFromSSE()).toEqual({ sp: 2, ms: 2 }); + expect(cache.popUpdatesFromSSE()).toEqual({}); }); }); diff --git a/src/storages/inRedis/EventsCacheInRedis.ts b/src/storages/inRedis/EventsCacheInRedis.ts index ecd14a32..c9b84459 100644 --- a/src/storages/inRedis/EventsCacheInRedis.ts +++ b/src/storages/inRedis/EventsCacheInRedis.ts @@ -1,6 +1,6 @@ import { IEventsCacheAsync } from '../types'; import { IMetadata } from '../../dtos/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; import { StoredEventWithMetadata } from '../../sync/submitters/types'; diff --git a/src/storages/inRedis/ImpressionsCacheInRedis.ts b/src/storages/inRedis/ImpressionsCacheInRedis.ts index 4ac0acaa..15d02508 100644 --- a/src/storages/inRedis/ImpressionsCacheInRedis.ts +++ b/src/storages/inRedis/ImpressionsCacheInRedis.ts @@ -1,6 +1,6 @@ 
import { IImpressionsCacheAsync } from '../types'; import { IMetadata } from '../../dtos/types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { StoredImpressionWithMetadata } from '../../sync/submitters/types'; import { ILogger } from '../../logger/types'; import { impressionsToJSON } from '../utils'; @@ -22,7 +22,7 @@ export class ImpressionsCacheInRedis implements IImpressionsCacheAsync { this.metadata = metadata; } - track(impressions: ImpressionDTO[]): Promise { // @ts-ignore + track(impressions: SplitIO.ImpressionDTO[]): Promise { // @ts-ignore return this.redis.rpush( this.key, impressionsToJSON(impressions, this.metadata), diff --git a/src/storages/inRedis/RedisAdapter.ts b/src/storages/inRedis/RedisAdapter.ts index 6d738606..6a6b423b 100644 --- a/src/storages/inRedis/RedisAdapter.ts +++ b/src/storages/inRedis/RedisAdapter.ts @@ -1,9 +1,9 @@ import ioredis, { Pipeline } from 'ioredis'; import { ILogger } from '../../logger/types'; import { merge, isString } from '../../utils/lang'; -import { _Set, setToArray, ISet } from '../../utils/lang/sets'; import { thenable } from '../../utils/promise/thenable'; import { timeout } from '../../utils/promise/timeout'; +import { setToArray } from '../../utils/lang/sets'; const LOG_PREFIX = 'storage:redis-adapter: '; @@ -37,7 +37,7 @@ export class RedisAdapter extends ioredis { private readonly log: ILogger; private _options: object; private _notReadyCommandsQueue?: IRedisCommand[]; - private _runningCommands: ISet>; + private _runningCommands: Set>; constructor(log: ILogger, storageSettings: Record = {}) { const options = RedisAdapter._defineOptions(storageSettings); @@ -47,7 +47,7 @@ export class RedisAdapter extends ioredis { this.log = log; this._options = options; this._notReadyCommandsQueue = []; - this._runningCommands = new _Set(); + this._runningCommands = new Set(); this._listenToEvents(); this._setTimeoutWrappers(); this._setDisconnectWrapper(); diff --git 
a/src/storages/inRedis/SegmentsCacheInRedis.ts b/src/storages/inRedis/SegmentsCacheInRedis.ts index 7ec2f20f..42ed3b10 100644 --- a/src/storages/inRedis/SegmentsCacheInRedis.ts +++ b/src/storages/inRedis/SegmentsCacheInRedis.ts @@ -17,24 +17,21 @@ export class SegmentsCacheInRedis implements ISegmentsCacheAsync { this.keys = keys; } - addToSegment(name: string, segmentKeys: string[]) { + /** + * Update the given segment `name` with the lists of `addedKeys`, `removedKeys` and `changeNumber`. + * The returned promise is resolved if the operation succeeds, with `true` if the segment was updated (i.e., some key was added or removed), + * or rejected if it fails (e.g., Redis operation fails). + */ + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number) { const segmentKey = this.keys.buildSegmentNameKey(name); - if (segmentKeys.length) { - return this.redis.sadd(segmentKey, segmentKeys).then(() => true); - } else { - return Promise.resolve(true); - } - } - - removeFromSegment(name: string, segmentKeys: string[]) { - const segmentKey = this.keys.buildSegmentNameKey(name); - - if (segmentKeys.length) { - return this.redis.srem(segmentKey, segmentKeys).then(() => true); - } else { - return Promise.resolve(true); - } + return Promise.all([ + addedKeys.length && this.redis.sadd(segmentKey, addedKeys), + removedKeys.length && this.redis.srem(segmentKey, removedKeys), + this.redis.set(this.keys.buildSegmentTillKey(name), changeNumber + '') + ]).then(() => { + return addedKeys.length > 0 || removedKeys.length > 0; + }); } isInSegment(name: string, key: string) { @@ -43,12 +40,6 @@ export class SegmentsCacheInRedis implements ISegmentsCacheAsync { ).then(matches => matches !== 0); } - setChangeNumber(name: string, changeNumber: number) { - return this.redis.set( - this.keys.buildSegmentTillKey(name), changeNumber + '' - ).then(status => status === 'OK'); - } - getChangeNumber(name: string) { return 
this.redis.get(this.keys.buildSegmentTillKey(name)).then((value: string | null) => { const i = parseInt(value as string, 10); diff --git a/src/storages/inRedis/SplitsCacheInRedis.ts b/src/storages/inRedis/SplitsCacheInRedis.ts index 8822647e..c98dca6e 100644 --- a/src/storages/inRedis/SplitsCacheInRedis.ts +++ b/src/storages/inRedis/SplitsCacheInRedis.ts @@ -4,7 +4,7 @@ import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; -import { ISet, _Set, returnDifference } from '../../utils/lang/sets'; +import { returnDifference } from '../../utils/lang/sets'; import type { RedisAdapter } from './RedisAdapter'; /** @@ -186,10 +186,8 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * Get list of all split definitions. * The returned promise is resolved with the list of split definitions, * or rejected if redis operation fails. - * - * @TODO we need to benchmark which is the maximun number of commands we could - * pipeline without kill redis performance. */ + // @TODO we need to benchmark which is the maximum number of commands we could pipeline without killing redis performance. getAll(): Promise { return this.redis.keys(this.keys.searchPatternForSplitKeys()) .then((listOfKeys) => this.redis.pipeline(listOfKeys.map(k => ['get', k])).exec()) @@ -215,14 +213,14 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * The returned promise is resolved with the list of feature flag names per flag set, * or rejected if the pipelined redis operation fails (e.g., timeout). 
*/ - getNamesByFlagSets(flagSets: string[]): Promise[]> { + getNamesByFlagSets(flagSets: string[]): Promise[]> { return this.redis.pipeline(flagSets.map(flagSet => ['smembers', this.keys.buildFlagSetKey(flagSet)])).exec() .then((results) => results.map(([e, value], index) => { if (e === null) return value; this.log.error(LOG_PREFIX + `Could not read result from get members of flag set ${flagSets[index]} due to an error: ${e}`); })) - .then(namesByFlagSets => namesByFlagSets.map(namesByFlagSet => new _Set(namesByFlagSet))); + .then(namesByFlagSets => namesByFlagSets.map(namesByFlagSet => new Set(namesByFlagSet))); } /** diff --git a/src/storages/inRedis/TelemetryCacheInRedis.ts b/src/storages/inRedis/TelemetryCacheInRedis.ts index 78108c3d..c4564f79 100644 --- a/src/storages/inRedis/TelemetryCacheInRedis.ts +++ b/src/storages/inRedis/TelemetryCacheInRedis.ts @@ -6,7 +6,6 @@ import { findLatencyIndex } from '../findLatencyIndex'; import { getTelemetryConfigStats } from '../../sync/submitters/telemetrySubmitter'; import { CONSUMER_MODE, STORAGE_REDIS } from '../../utils/constants'; import { isNaNNumber, isString } from '../../utils/lang'; -import { _Map } from '../../utils/lang/maps'; import { MAX_LATENCY_BUCKET_COUNT, newBuckets } from '../inMemory/TelemetryCacheInMemory'; import { parseLatencyField, parseExceptionField, parseMetadata } from '../utils'; import type { RedisAdapter } from './RedisAdapter'; @@ -15,9 +14,9 @@ export class TelemetryCacheInRedis implements ITelemetryCacheAsync { /** * Create a Telemetry cache that uses Redis as storage. - * @param log Logger instance. - * @param keys Key builder. - * @param redis Redis client. + * @param log - Logger instance. + * @param keys - Key builder. + * @param redis - Redis client. 
*/ constructor(private readonly log: ILogger, private readonly keys: KeyBuilderSS, private readonly redis: RedisAdapter) { } @@ -46,7 +45,7 @@ export class TelemetryCacheInRedis implements ITelemetryCacheAsync { popLatencies(): Promise { return this.redis.hgetall(this.keys.latencyPrefix).then(latencies => { - const result: MultiMethodLatencies = new _Map(); + const result: MultiMethodLatencies = new Map(); Object.keys(latencies).forEach(field => { @@ -86,7 +85,7 @@ export class TelemetryCacheInRedis implements ITelemetryCacheAsync { popExceptions(): Promise { return this.redis.hgetall(this.keys.exceptionPrefix).then(exceptions => { - const result: MultiMethodExceptions = new _Map(); + const result: MultiMethodExceptions = new Map(); Object.keys(exceptions).forEach(field => { @@ -119,7 +118,7 @@ export class TelemetryCacheInRedis implements ITelemetryCacheAsync { popConfigs(): Promise { return this.redis.hgetall(this.keys.initPrefix).then(configs => { - const result: MultiConfigs = new _Map(); + const result: MultiConfigs = new Map(); Object.keys(configs).forEach(field => { diff --git a/src/storages/inRedis/UniqueKeysCacheInRedis.ts b/src/storages/inRedis/UniqueKeysCacheInRedis.ts index 6abdb88a..2bebe84d 100644 --- a/src/storages/inRedis/UniqueKeysCacheInRedis.ts +++ b/src/storages/inRedis/UniqueKeysCacheInRedis.ts @@ -1,11 +1,11 @@ import { IUniqueKeysCacheBase } from '../types'; import { UniqueKeysCacheInMemory } from '../inMemory/UniqueKeysCacheInMemory'; -import { setToArray } from '../../utils/lang/sets'; import { DEFAULT_CACHE_SIZE, REFRESH_RATE, TTL_REFRESH } from './constants'; import { LOG_PREFIX } from './constants'; import { ILogger } from '../../logger/types'; import { UniqueKeysItemSs } from '../../sync/submitters/types'; import type { RedisAdapter } from './RedisAdapter'; +import { setToArray } from '../../utils/lang/sets'; export class UniqueKeysCacheInRedis extends UniqueKeysCacheInMemory implements IUniqueKeysCacheBase { @@ -63,7 +63,7 @@ export 
class UniqueKeysCacheInRedis extends UniqueKeysCacheInMemory implements I /** * Async consumer API, used by synchronizer. - * @param count number of items to pop from the queue. If not provided or equal 0, all items will be popped. + * @param count - number of items to pop from the queue. If not provided or equal 0, all items will be popped. */ popNRaw(count = 0): Promise { return this.redis.lrange(this.key, 0, count - 1).then(uniqueKeyItems => { diff --git a/src/storages/inRedis/__tests__/RedisAdapter.spec.ts b/src/storages/inRedis/__tests__/RedisAdapter.spec.ts index a8ef69da..6668803c 100644 --- a/src/storages/inRedis/__tests__/RedisAdapter.spec.ts +++ b/src/storages/inRedis/__tests__/RedisAdapter.spec.ts @@ -2,7 +2,6 @@ import forEach from 'lodash/forEach'; import merge from 'lodash/merge'; import reduce from 'lodash/reduce'; -import { _Set, setToArray } from '../../../utils/lang/sets'; // Mocking sdkLogger import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; @@ -95,7 +94,7 @@ describe('STORAGE Redis Adapter', () => { expect(typeof instance._options === 'object').toBe(true); // The instance will have an options object. expect(Array.isArray(instance._notReadyCommandsQueue)).toBe(true); // The instance will have an array as the _notReadyCommandsQueue property. - expect(instance._runningCommands instanceof _Set).toBe(true); // The instance will have a set as the _runningCommands property. + expect(instance._runningCommands instanceof Set).toBe(true); // The instance will have a set as the _runningCommands property. }); test('ioredis constructor params and static method _defineLibrarySettings', () => { @@ -374,7 +373,7 @@ describe('STORAGE Redis Adapter', () => { setTimeout(() => { // queued with rejection timeout wrapper expect(loggerMock.info.mock.calls).toEqual([[LOG_PREFIX + 'Attempting to disconnect but there are 2 commands still waiting for resolution. 
Defering disconnection until those finish.']]); - Promise.all(setToArray(instance._runningCommands)).catch(e => { + Promise.all(Array.from(instance._runningCommands)).catch(e => { setImmediate(() => { // Allow the callback to execute before checking. expect(loggerMock.warn.mock.calls[0]).toEqual([`${LOG_PREFIX}Pending commands finished with error: ${e}. Proceeding with disconnection.`]); // Should warn about the error but tell user that will disconnect anyways. expect(ioredisMock.disconnect).toBeCalledTimes(1); // Original method should have been called once, asynchronously @@ -394,7 +393,7 @@ describe('STORAGE Redis Adapter', () => { setTimeout(() => { expect(loggerMock.info.mock.calls).toEqual([[LOG_PREFIX + 'Attempting to disconnect but there are 4 commands still waiting for resolution. Defering disconnection until those finish.']]); - Promise.all(setToArray(instance._runningCommands)).then(() => { // This one will go through success path + Promise.all(Array.from(instance._runningCommands)).then(() => { // This one will go through success path setImmediate(() => { expect(loggerMock.debug.mock.calls).toEqual([[LOG_PREFIX + 'Pending commands finished successfully, disconnecting.']]); expect(ioredisMock.disconnect).toBeCalledTimes(1); // Original method should have been called once, asynchronously diff --git a/src/storages/inRedis/__tests__/SegmentsCacheInRedis.spec.ts b/src/storages/inRedis/__tests__/SegmentsCacheInRedis.spec.ts index 6222af95..62799bab 100644 --- a/src/storages/inRedis/__tests__/SegmentsCacheInRedis.spec.ts +++ b/src/storages/inRedis/__tests__/SegmentsCacheInRedis.spec.ts @@ -9,25 +9,21 @@ const keys = new KeyBuilderSS(prefix, metadata); describe('SEGMENTS CACHE IN REDIS', () => { - test('isInSegment, set/getChangeNumber, add/removeFromSegment', async () => { + test('isInSegment, getChangeNumber, update', async () => { const connection = new RedisAdapter(loggerMock); const cache = new SegmentsCacheInRedis(loggerMock, keys, connection); - await 
cache.addToSegment('mocked-segment', ['a', 'b', 'c']); - - await cache.setChangeNumber('mocked-segment', 1); - - await cache.removeFromSegment('mocked-segment', ['d']); + await cache.update('mocked-segment', ['a', 'b', 'c'], ['d'], 1); expect(await cache.getChangeNumber('mocked-segment') === 1).toBe(true); expect(await cache.getChangeNumber('inexistent-segment')).toBe(-1); // -1 if the segment doesn't exist - await cache.addToSegment('mocked-segment', ['d', 'e']); + await cache.update('mocked-segment', ['d', 'e'], [], 2); - await cache.removeFromSegment('mocked-segment', ['a', 'c']); + await cache.update('mocked-segment', [], ['a', 'c'], 2); - expect(await cache.getChangeNumber('mocked-segment') === 1).toBe(true); + expect(await cache.getChangeNumber('mocked-segment') === 2).toBe(true); expect(await cache.isInSegment('mocked-segment', 'a')).toBe(false); expect(await cache.isInSegment('mocked-segment', 'b')).toBe(true); diff --git a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts b/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts index d10db711..3f577254 100644 --- a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts +++ b/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts @@ -4,7 +4,6 @@ import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { splitWithUserTT, splitWithAccountTT, featureFlagOne, featureFlagThree, featureFlagTwo, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; import { ISplit } from '../../../dtos/types'; import { metadata } from '../../__tests__/KeyBuilder.spec'; -import { _Set } from '../../../utils/lang/sets'; import { RedisAdapter } from '../RedisAdapter'; const prefix = 'splits_cache_ut'; @@ -150,7 +149,7 @@ describe('SPLITS CACHE REDIS', () => { const connection = new RedisAdapter(loggerMock); // @ts-ignore const cache = new SplitsCacheInRedis(loggerMock, keysBuilder, connection, { groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); - const emptySet = new 
_Set([]); + const emptySet = new Set([]); await cache.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -159,27 +158,27 @@ describe('SPLITS CACHE REDIS', () => { ]); await cache.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(await cache.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); expect(await cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); await cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['1'] }); expect(await cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_two'])]); + expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); expect(await cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); await cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['x'] }); - expect(await cache.getNamesByFlagSets(['x'])).toEqual([new _Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new _Set(['ff_two']), new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['o', 'e', 
'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); // @ts-ignore Simulate an error in connection.pipeline().exec() jest.spyOn(connection, 'pipeline').mockImplementationOnce(() => { return { exec: () => Promise.resolve([['error', null], [null, ['ff_three']], [null, ['ff_one']]]) }; }); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new Set(['ff_three']), new Set(['ff_one'])]); (connection.pipeline as jest.Mock).mockRestore(); await cache.removeSplit(featureFlagOne.name); @@ -203,7 +202,7 @@ describe('SPLITS CACHE REDIS', () => { const connection = new RedisAdapter(loggerMock); const cacheWithoutFilters = new SplitsCacheInRedis(loggerMock, keysBuilder, connection); - const emptySet = new _Set([]); + const emptySet = new Set([]); await cacheWithoutFilters.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -212,12 +211,12 @@ describe('SPLITS CACHE REDIS', () => { ]); await cacheWithoutFilters.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new _Set(['ff_two', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); expect(await 
cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); // Delete splits, TT and flag set keys await cacheWithoutFilters.removeSplits([featureFlagThree.name, featureFlagTwo.name, featureFlagOne.name, featureFlagWithEmptyFS.name]); diff --git a/src/storages/pluggable/EventsCachePluggable.ts b/src/storages/pluggable/EventsCachePluggable.ts index d30d43b7..ce87a4c6 100644 --- a/src/storages/pluggable/EventsCachePluggable.ts +++ b/src/storages/pluggable/EventsCachePluggable.ts @@ -1,6 +1,6 @@ import { IPluggableStorageWrapper, IEventsCacheAsync } from '../types'; import { IMetadata } from '../../dtos/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; import { StoredEventWithMetadata } from '../../sync/submitters/types'; @@ -21,7 +21,7 @@ export class EventsCachePluggable implements IEventsCacheAsync { /** * Push given event to the storage. - * @param eventData Event item to push. + * @param eventData - Event item to push. * @returns A promise that is resolved with a boolean value indicating if the push operation succeeded or failed. * Unlike `impressions::track`, The promise will never be rejected. 
*/ diff --git a/src/storages/pluggable/ImpressionsCachePluggable.ts b/src/storages/pluggable/ImpressionsCachePluggable.ts index dede350d..0be57ef5 100644 --- a/src/storages/pluggable/ImpressionsCachePluggable.ts +++ b/src/storages/pluggable/ImpressionsCachePluggable.ts @@ -1,6 +1,6 @@ import { IPluggableStorageWrapper, IImpressionsCacheAsync } from '../types'; import { IMetadata } from '../../dtos/types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { StoredImpressionWithMetadata } from '../../sync/submitters/types'; import { ILogger } from '../../logger/types'; import { impressionsToJSON } from '../utils'; @@ -21,11 +21,11 @@ export class ImpressionsCachePluggable implements IImpressionsCacheAsync { /** * Push given impressions to the storage. - * @param impressions List of impresions to push. + * @param impressions - List of impresions to push. * @returns A promise that is resolved if the push operation succeeded * or rejected if the wrapper operation fails. */ - track(impressions: ImpressionDTO[]): Promise { + track(impressions: SplitIO.ImpressionDTO[]): Promise { return this.wrapper.pushItems( this.key, impressionsToJSON(impressions, this.metadata) diff --git a/src/storages/pluggable/SegmentsCachePluggable.ts b/src/storages/pluggable/SegmentsCachePluggable.ts index 995c66df..033b1a49 100644 --- a/src/storages/pluggable/SegmentsCachePluggable.ts +++ b/src/storages/pluggable/SegmentsCachePluggable.ts @@ -5,7 +5,6 @@ import { KeyBuilderSS } from '../KeyBuilderSS'; import { IPluggableStorageWrapper, ISegmentsCacheAsync } from '../types'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; -import { _Set } from '../../utils/lang/sets'; /** * ISegmentsCacheAsync implementation for pluggable storages. @@ -23,33 +22,20 @@ export class SegmentsCachePluggable implements ISegmentsCacheAsync { } /** - * Add a list of `segmentKeys` to the given segment `name`. 
- * The returned promise is resolved when the operation success - * or rejected if wrapper operation fails. - */ - addToSegment(name: string, segmentKeys: string[]) { - const segmentKey = this.keys.buildSegmentNameKey(name); - - if (segmentKeys.length) { - return this.wrapper.addItems(segmentKey, segmentKeys); - } else { - return Promise.resolve(); - } - } - - /** - * Remove a list of `segmentKeys` from the given segment `name`. - * The returned promise is resolved when the operation success - * or rejected if wrapper operation fails. + * Update the given segment `name` with the lists of `addedKeys`, `removedKeys` and `changeNumber`. + * The returned promise is resolved if the operation success, with `true` if the segment was updated (i.e., some key was added or removed), + * or rejected if it fails (e.g., wrapper operation fails). */ - removeFromSegment(name: string, segmentKeys: string[]) { + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number) { const segmentKey = this.keys.buildSegmentNameKey(name); - if (segmentKeys.length) { - return this.wrapper.removeItems(segmentKey, segmentKeys); - } else { - return Promise.resolve(); - } + return Promise.all([ + addedKeys.length && this.wrapper.addItems(segmentKey, addedKeys), + removedKeys.length && this.wrapper.removeItems(segmentKey, removedKeys), + this.wrapper.set(this.keys.buildSegmentTillKey(name), changeNumber + '') + ]).then(() => { + return addedKeys.length > 0 || removedKeys.length > 0; + }); } /** @@ -60,17 +46,6 @@ export class SegmentsCachePluggable implements ISegmentsCacheAsync { return this.wrapper.itemContains(this.keys.buildSegmentNameKey(name), key); } - /** - * Set till number for the given segment `name`. - * The returned promise is resolved when the operation success, - * or rejected if it fails (e.g., wrapper operation fails). 
- */ - setChangeNumber(name: string, changeNumber: number) { - return this.wrapper.set( - this.keys.buildSegmentTillKey(name), changeNumber + '' - ); - } - /** * Get till number or -1 if it's not defined. * The returned promise is resolved with the changeNumber or -1 if it doesn't exist or a wrapper operation fails. @@ -108,7 +83,7 @@ export class SegmentsCachePluggable implements ISegmentsCacheAsync { return this.wrapper.getItems(this.keys.buildRegisteredSegmentsKey()); } - /** @TODO implement if required by DataLoader or Producer mode */ + // @TODO implement if required by DataLoader or Producer mode clear(): Promise { return Promise.resolve(true); } diff --git a/src/storages/pluggable/SplitsCachePluggable.ts b/src/storages/pluggable/SplitsCachePluggable.ts index d35299f6..ddb06149 100644 --- a/src/storages/pluggable/SplitsCachePluggable.ts +++ b/src/storages/pluggable/SplitsCachePluggable.ts @@ -5,7 +5,7 @@ import { ILogger } from '../../logger/types'; import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; import { LOG_PREFIX } from './constants'; import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; -import { ISet, _Set, returnDifference } from '../../utils/lang/sets'; +import { returnDifference } from '../../utils/lang/sets'; /** * ISplitsCacheAsync implementation for pluggable storages. @@ -19,9 +19,9 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { /** * Create a SplitsCache that uses a storage wrapper. - * @param log Logger instance. - * @param keys Key builder. - * @param wrapper Adapted wrapper storage. + * @param log - Logger instance. + * @param keys - Key builder. + * @param wrapper - Adapted wrapper storage. 
*/ constructor(log: ILogger, keys: KeyBuilder, wrapper: IPluggableStorageWrapper, splitFiltersValidation?: ISplitFiltersValidation) { super(); @@ -181,11 +181,11 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved with the list of feature flag names per flag set. * It never rejects (If there is a wrapper error for some flag set, an empty set is returned for it). */ - getNamesByFlagSets(flagSets: string[]): Promise[]> { + getNamesByFlagSets(flagSets: string[]): Promise[]> { return Promise.all(flagSets.map(flagSet => { const flagSetKey = this.keys.buildFlagSetKey(flagSet); return this.wrapper.getItems(flagSetKey).catch(() => []); - })).then(namesByFlagSets => namesByFlagSets.map(namesByFlagSet => new _Set(namesByFlagSet))); + })).then(namesByFlagSets => namesByFlagSets.map(namesByFlagSet => new Set(namesByFlagSet))); } /** diff --git a/src/storages/pluggable/TelemetryCachePluggable.ts b/src/storages/pluggable/TelemetryCachePluggable.ts index 5f459f10..16ddd45f 100644 --- a/src/storages/pluggable/TelemetryCachePluggable.ts +++ b/src/storages/pluggable/TelemetryCachePluggable.ts @@ -6,7 +6,6 @@ import { findLatencyIndex } from '../findLatencyIndex'; import { getTelemetryConfigStats } from '../../sync/submitters/telemetrySubmitter'; import { CONSUMER_MODE, STORAGE_PLUGGABLE } from '../../utils/constants'; import { isString, isNaNNumber } from '../../utils/lang'; -import { _Map } from '../../utils/lang/maps'; import { MAX_LATENCY_BUCKET_COUNT, newBuckets } from '../inMemory/TelemetryCacheInMemory'; import { parseLatencyField, parseExceptionField, parseMetadata } from '../utils'; @@ -14,9 +13,9 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { /** * Create a Telemetry cache that uses a storage wrapper. - * @param log Logger instance. - * @param keys Key builder. - * @param wrapper Adapted wrapper storage. + * @param log - Logger instance. + * @param keys - Key builder. 
+ * @param wrapper - Adapted wrapper storage. */ constructor(private readonly log: ILogger, private readonly keys: KeyBuilderSS, private readonly wrapper: IPluggableStorageWrapper) { } @@ -43,7 +42,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return latencyKeys.length ? this.wrapper.getMany(latencyKeys).then(latencies => { - const result: MultiMethodLatencies = new _Map(); + const result: MultiMethodLatencies = new Map(); for (let i = 0; i < latencyKeys.length; i++) { const field = latencyKeys[i].split('::')[1]; @@ -77,7 +76,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return Promise.all(latencyKeys.map((latencyKey) => this.wrapper.del(latencyKey))).then(() => result); }) : // If latencyKeys is empty, return an empty map. - new _Map(); + new Map(); }); } @@ -90,7 +89,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return exceptionKeys.length ? this.wrapper.getMany(exceptionKeys).then(exceptions => { - const result: MultiMethodExceptions = new _Map(); + const result: MultiMethodExceptions = new Map(); for (let i = 0; i < exceptionKeys.length; i++) { const field = exceptionKeys[i].split('::')[1]; @@ -117,7 +116,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return Promise.all(exceptionKeys.map((exceptionKey) => this.wrapper.del(exceptionKey))).then(() => result); }) : // If exceptionKeys is empty, return an empty map. - new _Map(); + new Map(); }); } @@ -130,7 +129,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return configKeys.length ? 
this.wrapper.getMany(configKeys).then(configs => { - const result: MultiConfigs = new _Map(); + const result: MultiConfigs = new Map(); for (let i = 0; i < configKeys.length; i++) { const field = configKeys[i].split('::')[1]; @@ -154,7 +153,7 @@ export class TelemetryCachePluggable implements ITelemetryCacheAsync { return Promise.all(configKeys.map((configKey) => this.wrapper.del(configKey))).then(() => result); }) : // If configKeys is empty, return an empty map. - new _Map(); + new Map(); }); } } diff --git a/src/storages/pluggable/UniqueKeysCachePluggable.ts b/src/storages/pluggable/UniqueKeysCachePluggable.ts index d430682e..f78831f9 100644 --- a/src/storages/pluggable/UniqueKeysCachePluggable.ts +++ b/src/storages/pluggable/UniqueKeysCachePluggable.ts @@ -1,10 +1,10 @@ import { IPluggableStorageWrapper, IUniqueKeysCacheBase } from '../types'; import { UniqueKeysCacheInMemory } from '../inMemory/UniqueKeysCacheInMemory'; -import { setToArray } from '../../utils/lang/sets'; import { DEFAULT_CACHE_SIZE, REFRESH_RATE } from '../inRedis/constants'; import { LOG_PREFIX } from './constants'; import { ILogger } from '../../logger/types'; import { UniqueKeysItemSs } from '../../sync/submitters/types'; +import { setToArray } from '../../utils/lang/sets'; export class UniqueKeysCachePluggable extends UniqueKeysCacheInMemory implements IUniqueKeysCacheBase { @@ -56,7 +56,7 @@ export class UniqueKeysCachePluggable extends UniqueKeysCacheInMemory implements /** * Async consumer API, used by synchronizer. - * @param count number of items to pop from the queue. If not provided or equal 0, all items will be popped. + * @param count - number of items to pop from the queue. If not provided or equal 0, all items will be popped. 
*/ popNRaw(count = 0): Promise { return Promise.resolve(count || this.wrapper.getItemsCount(this.key)) diff --git a/src/storages/pluggable/__tests__/SegmentsCachePluggable.spec.ts b/src/storages/pluggable/__tests__/SegmentsCachePluggable.spec.ts index 7fa1c537..eedb8f11 100644 --- a/src/storages/pluggable/__tests__/SegmentsCachePluggable.spec.ts +++ b/src/storages/pluggable/__tests__/SegmentsCachePluggable.spec.ts @@ -13,24 +13,20 @@ describe('SEGMENTS CACHE PLUGGABLE', () => { wrapperMock.mockClear(); }); - test('isInSegment, set/getChangeNumber, add/removeFromSegment', async () => { + test('isInSegment, getChangeNumber, update', async () => { const cache = new SegmentsCachePluggable(loggerMock, keyBuilder, wrapperMock); - await cache.addToSegment('mocked-segment', ['a', 'b', 'c']); - - await cache.setChangeNumber('mocked-segment', 1); - - await cache.removeFromSegment('mocked-segment', ['d']); + await cache.update('mocked-segment', ['a', 'b', 'c'], ['d'], 1); expect(await cache.getChangeNumber('mocked-segment') === 1).toBe(true); expect(await cache.getChangeNumber('inexistent-segment')).toBe(-1); // -1 if the segment doesn't exist - await cache.addToSegment('mocked-segment', ['d', 'e']); + await cache.update('mocked-segment', ['d', 'e'], [], 2); - await cache.removeFromSegment('mocked-segment', ['a', 'c']); + await cache.update('mocked-segment', [], ['a', 'c'], 2); - expect(await cache.getChangeNumber('mocked-segment') === 1).toBe(true); + expect(await cache.getChangeNumber('mocked-segment') === 2).toBe(true); expect(await cache.isInSegment('mocked-segment', 'a')).toBe(false); expect(await cache.isInSegment('mocked-segment', 'b')).toBe(true); diff --git a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts b/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts index ea8aa73e..57fc34b3 100644 --- a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts +++ b/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts @@ -4,7 +4,6 @@ 
import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { wrapperMockFactory } from './wrapper.mock'; import { splitWithUserTT, splitWithAccountTT, featureFlagOne, featureFlagThree, featureFlagTwo, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; import { ISplit } from '../../../dtos/types'; -import { _Set } from '../../../utils/lang/sets'; const keysBuilder = new KeyBuilder(); @@ -154,7 +153,7 @@ describe('SPLITS CACHE PLUGGABLE', () => { test('flag set cache tests', async () => { const wrapper = wrapperMockFactory(); // @ts-ignore const cache = new SplitsCachePluggable(loggerMock, keysBuilder, wrapper, { groupedFilters: { bySet: ['o', 'n', 'e', 'x'] } }); - const emptySet = new _Set([]); + const emptySet = new Set([]); await cache.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -163,25 +162,25 @@ describe('SPLITS CACHE PLUGGABLE', () => { ]); await cache.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(await cache.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cache.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); expect(await cache.getNamesByFlagSets(['t'])).toEqual([emptySet]); // 't' not in filter - expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(await cache.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); await cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['1'] }); 
expect(await cache.getNamesByFlagSets(['1'])).toEqual([emptySet]); // '1' not in filter - expect(await cache.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_two'])]); + expect(await cache.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_two'])]); expect(await cache.getNamesByFlagSets(['n'])).toEqual([emptySet]); await cache.addSplit(featureFlagOne.name, { ...featureFlagOne, sets: ['x'] }); - expect(await cache.getNamesByFlagSets(['x'])).toEqual([new _Set(['ff_one'])]); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new _Set(['ff_two']), new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['x'])).toEqual([new Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([new Set(['ff_two']), new Set(['ff_three']), new Set(['ff_one'])]); // Simulate one error in getItems wrapper.getItems.mockImplementationOnce(() => Promise.reject('error')); - expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new _Set(['ff_three']), new _Set(['ff_one'])]); + expect(await cache.getNamesByFlagSets(['o', 'e', 'x'])).toEqual([emptySet, new Set(['ff_three']), new Set(['ff_one'])]); await cache.removeSplit(featureFlagOne.name); expect(await cache.getNamesByFlagSets(['x'])).toEqual([emptySet]); @@ -197,7 +196,7 @@ describe('SPLITS CACHE PLUGGABLE', () => { // if FlagSets filter is not defined, it should store all FlagSets in memory. 
test('flag set cache tests without filters', async () => { const cacheWithoutFilters = new SplitsCachePluggable(loggerMock, keysBuilder, wrapperMockFactory()); - const emptySet = new _Set([]); + const emptySet = new Set([]); await cacheWithoutFilters.addSplits([ [featureFlagOne.name, featureFlagOne], @@ -206,12 +205,12 @@ describe('SPLITS CACHE PLUGGABLE', () => { ]); await cacheWithoutFilters.addSplit(featureFlagWithEmptyFS.name, featureFlagWithEmptyFS); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new _Set(['ff_one', 'ff_two'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new _Set(['ff_one'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new _Set(['ff_one', 'ff_three'])]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new _Set(['ff_two', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['o'])).toEqual([new Set(['ff_one', 'ff_two'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['n'])).toEqual([new Set(['ff_one'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['e'])).toEqual([new Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['t'])).toEqual([new Set(['ff_two', 'ff_three'])]); expect(await cacheWithoutFilters.getNamesByFlagSets(['y'])).toEqual([emptySet]); - expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new _Set(['ff_one', 'ff_two']), new _Set(['ff_one']), new _Set(['ff_one', 'ff_three'])]); + expect(await cacheWithoutFilters.getNamesByFlagSets(['o', 'n', 'e'])).toEqual([new Set(['ff_one', 'ff_two']), new Set(['ff_one']), new Set(['ff_one', 'ff_three'])]); }); }); diff --git a/src/storages/pluggable/inMemoryWrapper.ts b/src/storages/pluggable/inMemoryWrapper.ts index c87c9a47..8193b3e3 100644 --- a/src/storages/pluggable/inMemoryWrapper.ts +++ b/src/storages/pluggable/inMemoryWrapper.ts @@ -1,17 +1,17 @@ import { IPluggableStorageWrapper } from 
'../types'; import { startsWith, toNumber } from '../../utils/lang'; -import { ISet, setToArray, _Set } from '../../utils/lang/sets'; +import { setToArray } from '../../utils/lang/sets'; /** * Creates a IPluggableStorageWrapper implementation that stores items in memory. * The `_cache` property is the object were items are stored. * Intended for testing purposes. * - * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves inmediatelly. + * @param connDelay - delay in millis for `connect` resolve. If not provided, `connect` resolves immediately. */ -export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWrapper & { _cache: Record>, _setConnDelay(connDelay: number): void } { +export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWrapper & { _cache: Record>, _setConnDelay(connDelay: number): void } { - let _cache: Record> = {}; + let _cache: Record> = {}; let _connDelay = connDelay; return { @@ -84,22 +84,22 @@ export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWra itemContains(key: string, item: string) { const set = _cache[key]; if (!set) return Promise.resolve(false); - if (set instanceof _Set) return Promise.resolve(set.has(item)); + if (set instanceof Set) return Promise.resolve(set.has(item)); return Promise.reject('key is not a set'); }, addItems(key: string, items: string[]) { - if (!(key in _cache)) _cache[key] = new _Set(); + if (!(key in _cache)) _cache[key] = new Set(); const set = _cache[key]; - if (set instanceof _Set) { + if (set instanceof Set) { items.forEach(item => set.add(item)); return Promise.resolve(); } return Promise.reject('key is not a set'); }, removeItems(key: string, items: string[]) { - if (!(key in _cache)) _cache[key] = new _Set(); + if (!(key in _cache)) _cache[key] = new Set(); const set = _cache[key]; - if (set instanceof _Set) { + if (set instanceof Set) { items.forEach(item => set.delete(item)); return Promise.resolve(); } 
@@ -108,7 +108,7 @@ export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWra getItems(key: string) { const set = _cache[key]; if (!set) return Promise.resolve([]); - if (set instanceof _Set) return Promise.resolve(setToArray(set)); + if (set instanceof Set) return Promise.resolve(setToArray(set)); return Promise.reject('key is not a set'); }, diff --git a/src/storages/pluggable/index.ts b/src/storages/pluggable/index.ts index 60350d66..372eeeb4 100644 --- a/src/storages/pluggable/index.ts +++ b/src/storages/pluggable/index.ts @@ -32,7 +32,7 @@ export interface PluggableStorageOptions { /** * Validate pluggable storage factory options. * - * @param options user options + * @param options - user options * @throws Will throw an error if the options are invalid. Example: wrapper is not provided or doesn't have some methods. */ function validatePluggableStorageOptions(options: any) { diff --git a/src/storages/pluggable/wrapperAdapter.ts b/src/storages/pluggable/wrapperAdapter.ts index c47a5d8b..f56a1c90 100644 --- a/src/storages/pluggable/wrapperAdapter.ts +++ b/src/storages/pluggable/wrapperAdapter.ts @@ -26,8 +26,8 @@ export const METHODS_TO_PROMISE_WRAP: string[] = [ * Adapter of the Pluggable Storage Wrapper. * Used to handle exceptions as rejected promises, in order to simplify the error handling on storages. 
* - * @param log logger instance - * @param wrapper storage wrapper to adapt + * @param log - logger instance + * @param wrapper - storage wrapper to adapt * @returns an adapted version of the given storage wrapper */ export function wrapperAdapter(log: ILogger, wrapper: IPluggableStorageWrapper): IPluggableStorageWrapper { diff --git a/src/storages/types.ts b/src/storages/types.ts index 3fa7d244..83f388b1 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,7 +1,8 @@ -import { MaybeThenable, ISplit } from '../dtos/types'; +import SplitIO from '../../types/splitio'; +import { MaybeThenable, ISplit, IMySegmentsResponse } from '../dtos/types'; +import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; -import { SplitIO, ImpressionDTO, ISettings } from '../types'; -import { ISet } from '../utils/lang/sets'; +import { ISettings } from '../types'; /** * Interface of a pluggable storage wrapper. @@ -13,56 +14,50 @@ export interface IPluggableStorageWrapper { /** * Get the value of given `key`. * - * @function get - * @param {string} key Item to retrieve - * @returns {Promise} A promise that resolves with the element value associated with the specified `key`, + * @param key - Item to retrieve + * @returns A promise that resolves with the element value associated with the specified `key`, * or null if the key does not exist. The promise rejects if the operation fails. */ get: (key: string) => Promise /** * Add or update an item with a specified `key` and `value`. 
* - * @function set - * @param {string} key Item to update - * @param {string} value Value to set - * @returns {Promise} A promise that resolves if the operation success, whether the key was added or updated. + * @param key - Item to update + * @param value - Value to set + * @returns A promise that resolves if the operation success, whether the key was added or updated. * The promise rejects if the operation fails. */ set: (key: string, value: string) => Promise /** * Add or update an item with a specified `key` and `value`. * - * @function getAndSet - * @param {string} key Item to update - * @param {string} value Value to set - * @returns {Promise} A promise that resolves with the previous value associated to the given `key`, or null if not set. + * @param key - Item to update + * @param value - Value to set + * @returns A promise that resolves with the previous value associated to the given `key`, or null if not set. * The promise rejects if the operation fails. */ getAndSet: (key: string, value: string) => Promise /** * Removes the specified item by `key`. * - * @function del - * @param {string} key Item to delete - * @returns {Promise} A promise that resolves if the operation success, whether the key existed and was removed (resolves with true) or it didn't exist (resolves with false). + * @param key - Item to delete + * @returns A promise that resolves if the operation success, whether the key existed and was removed (resolves with true) or it didn't exist (resolves with false). * The promise rejects if the operation fails, for example, if there is a connection error. */ del: (key: string) => Promise /** * Returns all keys matching the given prefix. * - * @function getKeysByPrefix - * @param {string} prefix String prefix to match - * @returns {Promise} A promise that resolves with the list of keys that match the given `prefix`. + * @param prefix - String prefix to match + * @returns A promise that resolves with the list of keys that match the given `prefix`. 
* The promise rejects if the operation fails. */ getKeysByPrefix: (prefix: string) => Promise /** * Returns the values of all given `keys`. * - * @function getMany - * @param {string[]} keys List of keys to retrieve - * @returns {Promise<(string | null)[]>} A promise that resolves with the list of items associated with the specified list of `keys`. + * @param keys - List of keys to retrieve + * @returns A promise that resolves with the list of items associated with the specified list of `keys`. * For every key that does not hold a string value or does not exist, null is returned. The promise rejects if the operation fails. */ getMany: (keys: string[]) => Promise<(string | null)[]> @@ -72,20 +67,18 @@ export interface IPluggableStorageWrapper { /** * Increments the number stored at `key` by `increment`, or set it to `increment` if the value doesn't exist. * - * @function incr - * @param {string} key Key to increment - * @param {number} increment Value to increment by. Defaults to 1. - * @returns {Promise} A promise that resolves with the value of key after the increment. The promise rejects if the operation fails, + * @param key - Key to increment + * @param increment - Value to increment by. Defaults to 1. + * @returns A promise that resolves with the value of key after the increment. The promise rejects if the operation fails, * for example, if there is a connection error or the key contains a string that can not be represented as integer. */ incr: (key: string, increment?: number) => Promise /** * Decrements the number stored at `key` by `decrement`, or set it to minus `decrement` if the value doesn't exist. * - * @function decr - * @param {string} key Key to decrement - * @param {number} decrement Value to decrement by. Defaults to 1. - * @returns {Promise} A promise that resolves with the value of key after the decrement. The promise rejects if the operation fails, + * @param key - Key to decrement + * @param decrement - Value to decrement by. Defaults to 1. 
+ * @returns A promise that resolves with the value of key after the decrement. The promise rejects if the operation fails, * for example, if there is a connection error or the key contains a string that can not be represented as integer. */ decr: (key: string, decrement?: number) => Promise @@ -95,29 +88,26 @@ export interface IPluggableStorageWrapper { /** * Inserts given items at the tail of `key` list. If `key` does not exist, an empty list is created before pushing the items. * - * @function pushItems - * @param {string} key List key - * @param {string[]} items List of items to push - * @returns {Promise} A promise that resolves if the operation success. + * @param key - List key + * @param items - List of items to push + * @returns A promise that resolves if the operation success. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a list. */ pushItems: (key: string, items: string[]) => Promise /** * Removes and returns the first `count` items from a list. If `key` does not exist, an empty list is items is returned. * - * @function popItems - * @param {string} key List key - * @param {number} count Number of items to pop - * @returns {Promise} A promise that resolves with the list of removed items from the list, or an empty array when key does not exist. + * @param key - List key + * @param count - Number of items to pop + * @returns A promise that resolves with the list of removed items from the list, or an empty array when key does not exist. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a list. */ popItems: (key: string, count: number) => Promise /** * Returns the count of items in a list, or 0 if `key` does not exist. * - * @function getItemsCount - * @param {string} key List key - * @returns {Promise} A promise that resolves with the number of items at the `key` list, or 0 when `key` does not exist. 
+ * @param key - List key + * @returns A promise that resolves with the number of items at the `key` list, or 0 when `key` does not exist. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a list. */ getItemsCount: (key: string) => Promise @@ -127,10 +117,9 @@ export interface IPluggableStorageWrapper { /** * Returns if item is a member of a set. * - * @function itemContains - * @param {string} key Set key - * @param {string} item Item value - * @returns {Promise} A promise that resolves with true boolean value if `item` is a member of the set stored at `key`, + * @param key - Set key + * @param item - Item value + * @returns A promise that resolves with true boolean value if `item` is a member of the set stored at `key`, * or false if it is not a member or `key` set does not exist. The promise rejects if the operation fails, for example, * if there is a connection error or the key holds a value that is not a set. */ @@ -139,29 +128,26 @@ export interface IPluggableStorageWrapper { * Add the specified `items` to the set stored at `key`. Those items that are already part of the set are ignored. * If key does not exist, an empty set is created before adding the items. * - * @function addItems - * @param {string} key Set key - * @param {string} items Items to add - * @returns {Promise} A promise that resolves if the operation success. + * @param key - Set key + * @param items - Items to add + * @returns A promise that resolves if the operation success. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a set. */ addItems: (key: string, items: string[]) => Promise /** * Remove the specified `items` from the set stored at `key`. Those items that are not part of the set are ignored. 
* - * @function removeItems - * @param {string} key Set key - * @param {string} items Items to remove - * @returns {Promise} A promise that resolves if the operation success. If key does not exist, the promise also resolves. + * @param key - Set key + * @param items - Items to remove + * @returns A promise that resolves if the operation succeeds. If key does not exist, the promise also resolves. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a set. */ removeItems: (key: string, items: string[]) => Promise /** * Returns all the items of the `key` set. * - * @function getItems - * @param {string} key Set key - * @returns {Promise} A promise that resolves with the list of items. If key does not exist, the result is an empty list. + * @param key - Set key + * @returns A promise that resolves with the list of items. If key does not exist, the result is an empty list. * The promise rejects if the operation fails, for example, if there is a connection error or the key holds a value that is not a set. */ getItems: (key: string) => Promise @@ -173,8 +159,7 @@ export interface IPluggableStorageWrapper { * It is meant for storages that requires to be connected to some database or server. Otherwise it can just return a resolved promise. * Note: will be called once on SplitFactory instantiation and once per each shared client instantiation. * - * @function connect - * @returns {Promise} A promise that resolves when the wrapper successfully connect to the underlying storage. + * @returns A promise that resolves when the wrapper successfully connects to the underlying storage. * The promise rejects with the corresponding error if the wrapper fails to connect. */ connect: () => Promise @@ -183,8 +168,7 @@ export interface IPluggableStorageWrapper { * It is meant for storages that requires to be closed, in order to release resources. Otherwise it can just return a resolved promise.
* Note: will be called once on SplitFactory main client destroy. * - * @function disconnect - * @returns {Promise} A promise that resolves when the operation ends. + * @returns A promise that resolves when the operation ends. * The promise never rejects. */ disconnect: () => Promise @@ -204,13 +188,13 @@ export interface ISplitsCacheBase { getSplitNames(): MaybeThenable, // should never reject or throw an exception. Instead return true by default, asssuming the TT might exist. trafficTypeExists(trafficType: string): MaybeThenable, - // only for Client-Side + // only for Client-Side. Returns true if the storage is not synchronized yet (getChangeNumber() === -1) or contains a FF using segments or large segments usesSegments(): MaybeThenable, clear(): MaybeThenable, // should never reject or throw an exception. Instead return false by default, to avoid emitting SDK_READY_FROM_CACHE. checkCache(): MaybeThenable, killLocally(name: string, defaultTreatment: string, changeNumber: number): MaybeThenable, - getNamesByFlagSets(flagSets: string[]): MaybeThenable[]> + getNamesByFlagSets(flagSets: string[]): MaybeThenable[]> } export interface ISplitsCacheSync extends ISplitsCacheBase { @@ -218,7 +202,7 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { removeSplits(names: string[]): boolean[], getSplit(name: string): ISplit | null, getSplits(names: string[]): Record, - setChangeNumber(changeNumber: number): boolean, + setChangeNumber(changeNumber: number): boolean | void, getChangeNumber(): number, getAll(): ISplit[], getSplitNames(): string[], @@ -227,7 +211,7 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { clear(): void, checkCache(): boolean, killLocally(name: string, defaultTreatment: string, changeNumber: number): boolean, - getNamesByFlagSets(flagSets: string[]): ISet[] + getNamesByFlagSets(flagSets: string[]): Set[] } export interface ISplitsCacheAsync extends ISplitsCacheBase { @@ -244,44 +228,38 @@ export interface ISplitsCacheAsync 
extends ISplitsCacheBase { clear(): Promise, checkCache(): Promise, killLocally(name: string, defaultTreatment: string, changeNumber: number): Promise, - getNamesByFlagSets(flagSets: string[]): Promise[]> + getNamesByFlagSets(flagSets: string[]): Promise[]> } /** Segments cache */ export interface ISegmentsCacheBase { - addToSegment(name: string, segmentKeys: string[]): MaybeThenable // different signature on Server and Client-Side - removeFromSegment(name: string, segmentKeys: string[]): MaybeThenable // different signature on Server and Client-Side isInSegment(name: string, key?: string): MaybeThenable // different signature on Server and Client-Side registerSegments(names: string[]): MaybeThenable // only for Server-Side getRegisteredSegments(): MaybeThenable // only for Server-Side - setChangeNumber(name: string, changeNumber: number): MaybeThenable // only for Server-Side getChangeNumber(name: string): MaybeThenable // only for Server-Side + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number): MaybeThenable // only for Server-Side clear(): MaybeThenable } // Same API for both variants: SegmentsCache and MySegmentsCache (client-side API) export interface ISegmentsCacheSync extends ISegmentsCacheBase { - addToSegment(name: string, segmentKeys?: string[]): boolean - removeFromSegment(name: string, segmentKeys?: string[]): boolean isInSegment(name: string, key?: string): boolean registerSegments(names: string[]): boolean getRegisteredSegments(): string[] getKeysCount(): number // only used for telemetry - setChangeNumber(name: string, changeNumber: number): boolean - getChangeNumber(name: string): number - resetSegments(names: string[]): boolean // only for Sync Client-Side + getChangeNumber(name?: string): number + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number): boolean // only for Server-Side + resetSegments(segmentsData: MySegmentsData | IMySegmentsResponse): boolean // only for Sync 
Client-Side clear(): void } export interface ISegmentsCacheAsync extends ISegmentsCacheBase { - addToSegment(name: string, segmentKeys: string[]): Promise - removeFromSegment(name: string, segmentKeys: string[]): Promise isInSegment(name: string, key: string): Promise registerSegments(names: string[]): Promise getRegisteredSegments(): Promise - setChangeNumber(name: string, changeNumber: number): Promise getChangeNumber(name: string): Promise + update(name: string, addedKeys: string[], removedKeys: string[], changeNumber: number): Promise clear(): Promise } @@ -289,7 +267,7 @@ export interface ISegmentsCacheAsync extends ISegmentsCacheBase { export interface IImpressionsCacheBase { // Used by impressions tracker, in DEBUG and OPTIMIZED impression modes, to push impressions into the storage. - track(data: ImpressionDTO[]): MaybeThenable + track(data: SplitIO.ImpressionDTO[]): MaybeThenable } export interface IEventsCacheBase { @@ -320,8 +298,8 @@ export interface IRecorderCacheSync { pop(toMerge?: T): T } -export interface IImpressionsCacheSync extends IImpressionsCacheBase, IRecorderCacheSync { - track(data: ImpressionDTO[]): void +export interface IImpressionsCacheSync extends IImpressionsCacheBase, IRecorderCacheSync { + track(data: SplitIO.ImpressionDTO[]): void /* Registers callback for full queue */ setOnFullQueueCb(cb: () => void): void } @@ -354,7 +332,7 @@ export interface IRecorderCacheAsync { export interface IImpressionsCacheAsync extends IImpressionsCacheBase, IRecorderCacheAsync { // Consumer API method, used by impressions tracker (in standalone and consumer modes) to push data into. // The result promise can reject. 
- track(data: ImpressionDTO[]): Promise + track(data: SplitIO.ImpressionDTO[]): Promise } export interface IEventsCacheAsync extends IEventsCacheBase, IRecorderCacheAsync { @@ -477,7 +455,10 @@ export interface IStorageSync extends IStorageBase< IEventsCacheSync, ITelemetryCacheSync, IUniqueKeysCacheSync -> { } +> { + // Defined in client-side + largeSegments?: ISegmentsCacheSync, +} export interface IStorageAsync extends IStorageBase< ISplitsCacheAsync, @@ -502,14 +483,13 @@ export interface IStorageFactoryParams { onReadyCb: (error?: any) => void, } -export type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; -export type IStorageSyncFactory = { - readonly type: StorageType, +export type IStorageSyncFactory = SplitIO.StorageSyncFactory & { + readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageSync } -export type IStorageAsyncFactory = { - type: StorageType, +export type IStorageAsyncFactory = SplitIO.StorageAsyncFactory & { + readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageAsync } diff --git a/src/storages/utils.ts b/src/storages/utils.ts index 2bf236e3..2963bbc5 100644 --- a/src/storages/utils.ts +++ b/src/storages/utils.ts @@ -2,7 +2,8 @@ import { IMetadata } from '../dtos/types'; import { Method, StoredImpressionWithMetadata } from '../sync/submitters/types'; -import { ImpressionDTO, ISettings } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { UNKNOWN } from '../utils/constants'; import { MAX_LATENCY_BUCKET_COUNT } from './inMemory/TelemetryCacheInMemory'; import { METHOD_NAMES } from './KeyBuilderSS'; @@ -16,7 +17,7 @@ export function metadataBuilder(settings: Pick } // Converts impressions to be stored in Redis or pluggable storage. 
-export function impressionsToJSON(impressions: ImpressionDTO[], metadata: IMetadata): string[] { +export function impressionsToJSON(impressions: SplitIO.ImpressionDTO[], metadata: IMetadata): string[] { return impressions.map(impression => { const impressionWithMetadata: StoredImpressionWithMetadata = { m: metadata, diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index 44b9c8b3..7fda853b 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -29,7 +29,7 @@ const pollingManagerMock = { start: jest.fn(), stop: jest.fn(), isRunning: jest.fn(), - add: jest.fn(()=>{return {isrunning: () => true};}), + add: jest.fn(() => { return { isRunning: () => true }; }), get: jest.fn() }; @@ -125,12 +125,9 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', () if (!pollingSyncManagerShared) throw new Error('pollingSyncManagerShared should exist'); - pollingSyncManagerShared.start(); - expect(pollingManagerMock.start).not.toBeCalled(); pollingSyncManagerShared.stop(); - pollingSyncManagerShared.start(); expect(pollingManagerMock.start).not.toBeCalled(); @@ -153,12 +150,9 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', () if (!pushingSyncManagerShared) throw new Error('pushingSyncManagerShared should exist'); - pushingSyncManagerShared.start(); - expect(pollingManagerMock.start).not.toBeCalled(); pushingSyncManagerShared.stop(); - pushingSyncManagerShared.start(); expect(pollingManagerMock.start).not.toBeCalled(); diff --git a/src/sync/__tests__/syncTask.spec.ts b/src/sync/__tests__/syncTask.spec.ts index f2516e8a..5423d387 100644 --- a/src/sync/__tests__/syncTask.spec.ts +++ b/src/sync/__tests__/syncTask.spec.ts @@ -24,7 +24,7 @@ test('syncTaskFactory / start & stop methods for periodic execution', async () = // Calling `start` again has not effect expect(syncTask.start(...startArgs)).toBe(undefined); - // 
Calling `execute` inmediatelly executes the given task and returns its result + // Calling `execute` immediately executes the given task and returns its result result = await syncTask.execute(3, 4); expect(result).toBe(taskResult); expect(asyncTask).toHaveBeenLastCalledWith(3, 4); diff --git a/src/sync/offline/LocalhostFromObject.ts b/src/sync/offline/LocalhostFromObject.ts index b823e7ee..bd3d2209 100644 --- a/src/sync/offline/LocalhostFromObject.ts +++ b/src/sync/offline/LocalhostFromObject.ts @@ -1,12 +1,6 @@ import { splitsParserFromSettingsFactory } from './splitsParser/splitsParserFromSettings'; import { syncManagerOfflineFactory } from './syncManagerOffline'; -import { SplitIO } from '../../types'; -// Singleton instance of the factory function for offline SyncManager from object (a.k.a. localhostFromObject) +// Singleton instance of the factory function for offline SyncManager from object // SDK instances instantiate their SyncManagers with the same factory -const localhostFromObject = syncManagerOfflineFactory(splitsParserFromSettingsFactory) as SplitIO.LocalhostFactory; -localhostFromObject.type = 'LocalhostFromObject'; - -export function LocalhostFromObject(): SplitIO.LocalhostFactory { - return localhostFromObject; -} +export const localhostFromObjectFactory = syncManagerOfflineFactory(splitsParserFromSettingsFactory); diff --git a/src/sync/offline/splitsParser/__tests__/splitsParserFromSettings.spec.ts b/src/sync/offline/splitsParser/__tests__/splitsParserFromSettings.spec.ts index 08e42996..20e1a17f 100644 --- a/src/sync/offline/splitsParser/__tests__/splitsParserFromSettings.spec.ts +++ b/src/sync/offline/splitsParser/__tests__/splitsParserFromSettings.spec.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../../../types'; +import SplitIO from '../../../../../types/splitio'; import { splitsParserFromSettingsFactory } from '../splitsParserFromSettings'; const FEATURE_ON = { conditions: [{ conditionType: 'ROLLOUT', label: 'default rule', matcherGroup: { 
combiner: 'AND', matchers: [{ keySelector: null, matcherType: 'ALL_KEYS', negate: false }] }, partitions: [{ size: 100, treatment: 'on' }] }], configurations: {}, trafficTypeName: 'localhost' }; diff --git a/src/sync/offline/splitsParser/splitsParserFromSettings.ts b/src/sync/offline/splitsParser/splitsParserFromSettings.ts index e94d3b07..f242b26c 100644 --- a/src/sync/offline/splitsParser/splitsParserFromSettings.ts +++ b/src/sync/offline/splitsParser/splitsParserFromSettings.ts @@ -1,5 +1,5 @@ import { ISplitPartial } from '../../../dtos/types'; -import { ISettings, SplitIO } from '../../../types'; +import SplitIO from '../../../../types/splitio'; import { isObject, forOwn, merge } from '../../../utils/lang'; import { parseCondition } from './parseCondition'; @@ -39,9 +39,9 @@ export function splitsParserFromSettingsFactory() { /** * - * @param settings validated object with mocked features mapping. + * @param settings - validated object with mocked features mapping. */ - return function splitsParserFromSettings(settings: Pick): false | Record { + return function splitsParserFromSettings(settings: Pick): false | Record { const features = settings.features as SplitIO.MockedFeaturesMap || {}; if (!mockUpdated(features)) return false; diff --git a/src/sync/offline/syncManagerOffline.ts b/src/sync/offline/syncManagerOffline.ts index cd5c435d..31ac6dd0 100644 --- a/src/sync/offline/syncManagerOffline.ts +++ b/src/sync/offline/syncManagerOffline.ts @@ -1,4 +1,4 @@ -import { ISyncManager, ISyncManagerCS } from '../types'; +import { ISyncManagerCS } from '../types'; import { fromObjectSyncTaskFactory } from './syncTasks/fromObjectSyncTask'; import { objectAssign } from '../../utils/lang/objectAssign'; import { ISplitsParser } from './splitsParser/types'; @@ -14,7 +14,7 @@ function flush() { * Offline SyncManager factory. * Can be used for server-side API, and client-side API with or without multiple clients. 
* - * @param splitsParser e.g., `splitsParserFromFile`, `splitsParserFromSettings`. + * @param splitsParser - e.g., `splitsParserFromFile`, `splitsParserFromSettings`. */ export function syncManagerOfflineFactory( splitsParserFactory: () => ISplitsParser @@ -29,26 +29,34 @@ export function syncManagerOfflineFactory( storage, }: ISdkFactoryContextSync): ISyncManagerCS { + const mainSyncManager = fromObjectSyncTaskFactory(splitsParserFactory(), storage, readiness, settings); + const mainStart = mainSyncManager.start; + const sharedStarts: Array<() => void> = []; + return objectAssign( - fromObjectSyncTaskFactory(splitsParserFactory(), storage, readiness, settings), + mainSyncManager, { + start() { + mainStart(); + sharedStarts.forEach(cb => cb()); + sharedStarts.length = 0; + }, // fake flush, that resolves immediately flush, // [Only used for client-side] - shared(matchingKey: string, readinessManager: IReadinessManager): ISyncManager { + shared(matchingKey: string, readinessManager: IReadinessManager) { + // In LOCALHOST mode, shared clients are ready in the next event-loop cycle than created + // SDK_READY cannot be emitted directly because this will not update the readiness status + function emitSdkReady() { + readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); // SDK_SPLITS_ARRIVED emitted by main SyncManager + } + + if (mainSyncManager.isRunning()) setTimeout(emitSdkReady); + else sharedStarts.push(emitSdkReady); + return { - start() { - // In LOCALHOST mode, shared clients are ready in the next event-loop cycle than created - // SDK_READY cannot be emitted directly because this will not update the readiness status - setTimeout(() => { - readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); // SDK_SPLITS_ARRIVED emitted by main SyncManager - }, 0); - }, stop() { }, - isRunning() { - return true; - }, flush, }; } diff --git a/src/sync/polling/fetchers/mySegmentsFetcher.ts b/src/sync/polling/fetchers/mySegmentsFetcher.ts index 498132b0..8773a7aa 100644 --- 
a/src/sync/polling/fetchers/mySegmentsFetcher.ts +++ b/src/sync/polling/fetchers/mySegmentsFetcher.ts @@ -1,27 +1,25 @@ -import { IFetchMySegments, IResponse } from '../../../services/types'; -import { IMySegmentsResponseItem } from '../../../dtos/types'; +import { IFetchMemberships, IResponse } from '../../../services/types'; +import { IMembershipsResponse } from '../../../dtos/types'; import { IMySegmentsFetcher } from './types'; /** * Factory of MySegments fetcher. * MySegments fetcher is a wrapper around `mySegments` API service that parses the response and handle errors. */ -export function mySegmentsFetcherFactory(fetchMySegments: IFetchMySegments): IMySegmentsFetcher { +export function mySegmentsFetcherFactory(fetchMemberships: IFetchMemberships): IMySegmentsFetcher { return function mySegmentsFetcher( userMatchingKey: string, noCache?: boolean, - // Optional decorator for `fetchMySegments` promise, such as timeout or time tracker + till?: number, + // Optional decorator for `fetchMemberships` promise, such as timeout or time tracker decorator?: (promise: Promise) => Promise - ): Promise { + ): Promise { - let mySegmentsPromise = fetchMySegments(userMatchingKey, noCache); + let mySegmentsPromise = fetchMemberships(userMatchingKey, noCache, till); if (decorator) mySegmentsPromise = decorator(mySegmentsPromise); - // Extract segment names - return mySegmentsPromise - .then(resp => resp.json()) - .then(json => json.mySegments.map((segment: IMySegmentsResponseItem) => segment.name)); + return mySegmentsPromise.then(resp => resp.json()); }; } diff --git a/src/sync/polling/fetchers/segmentChangesFetcher.ts b/src/sync/polling/fetchers/segmentChangesFetcher.ts index 01a42b38..2eb1cdbf 100644 --- a/src/sync/polling/fetchers/segmentChangesFetcher.ts +++ b/src/sync/polling/fetchers/segmentChangesFetcher.ts @@ -28,7 +28,7 @@ export function segmentChangesFetcherFactory(fetchSegmentChanges: IFetchSegmentC segmentName: string, noCache?: boolean, till?: number, - // 
Optional decorator for `fetchMySegments` promise, such as timeout or time tracker + // Optional decorator for `fetchSegmentChanges` promise, such as timeout or time tracker decorator?: (promise: Promise) => Promise ): Promise { diff --git a/src/sync/polling/fetchers/types.ts b/src/sync/polling/fetchers/types.ts index 19ccd7bb..72968a5f 100644 --- a/src/sync/polling/fetchers/types.ts +++ b/src/sync/polling/fetchers/types.ts @@ -1,4 +1,4 @@ -import { ISplitChangesResponse, ISegmentChangesResponse } from '../../../dtos/types'; +import { ISplitChangesResponse, ISegmentChangesResponse, IMembershipsResponse } from '../../../dtos/types'; import { IResponse } from '../../../services/types'; export type ISplitChangesFetcher = ( @@ -19,5 +19,6 @@ export type ISegmentChangesFetcher = ( export type IMySegmentsFetcher = ( userMatchingKey: string, noCache?: boolean, + till?: number, decorator?: (promise: Promise) => Promise -) => Promise +) => Promise diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index ac3253f2..4ce0882a 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -29,13 +29,13 @@ export function pollingManagerCSFactory( const mySegmentsSyncTask = add(matchingKey, readiness, storage); function startMySegmentsSyncTasks() { - forOwn(mySegmentsSyncTasks, function (mySegmentsSyncTask) { + forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { mySegmentsSyncTask.start(); }); } function stopMySegmentsSyncTasks() { - forOwn(mySegmentsSyncTasks, function (mySegmentsSyncTask) { + forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { if (mySegmentsSyncTask.isRunning()) mySegmentsSyncTask.stop(); }); } @@ -55,7 +55,7 @@ export function pollingManagerCSFactory( }); function add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync) { - const mySegmentsSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMySegments, storage, readiness, settings, matchingKey); + const 
mySegmentsSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMemberships, storage, readiness, settings, matchingKey); // smart ready function smartReady() { @@ -94,7 +94,7 @@ export function pollingManagerCSFactory( // fetch splits and segments syncAll() { const promises = [splitsSyncTask.execute()]; - forOwn(mySegmentsSyncTasks, function (mySegmentsSyncTask) { + forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { promises.push(mySegmentsSyncTask.execute()); }); return Promise.all(promises); diff --git a/src/sync/polling/pollingManagerSS.ts b/src/sync/polling/pollingManagerSS.ts index 90f252a4..cea57dfe 100644 --- a/src/sync/polling/pollingManagerSS.ts +++ b/src/sync/polling/pollingManagerSS.ts @@ -1,7 +1,6 @@ import { splitsSyncTaskFactory } from './syncTasks/splitsSyncTask'; import { segmentsSyncTaskFactory } from './syncTasks/segmentsSyncTask'; import { IPollingManager, ISegmentsSyncTask, ISplitsSyncTask } from './types'; -import { thenable } from '../../utils/promise/thenable'; import { POLLING_START, POLLING_STOP, LOG_PREFIX_SYNC_POLLING } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; @@ -29,9 +28,9 @@ export function pollingManagerSSFactory( log.debug(LOG_PREFIX_SYNC_POLLING + `Segments will be refreshed each ${settings.scheduler.segmentsRefreshRate} millis`); const startingUp = splitsSyncTask.start(); - if (thenable(startingUp)) { + if (startingUp) { startingUp.then(() => { - segmentsSyncTask.start(); + if (splitsSyncTask.isRunning()) segmentsSyncTask.start(); }); } }, diff --git a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts index f8ebade2..fe006c68 100644 --- a/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +++ b/src/sync/polling/syncTasks/mySegmentsSyncTask.ts @@ -2,7 +2,7 @@ import { IStorageSync } from '../../../storages/types'; import { IReadinessManager } from '../../../readiness/types'; import { syncTaskFactory } from '../../syncTask'; import 
{ IMySegmentsSyncTask } from '../types'; -import { IFetchMySegments } from '../../../services/types'; +import { IFetchMemberships } from '../../../services/types'; import { mySegmentsFetcherFactory } from '../fetchers/mySegmentsFetcher'; import { ISettings } from '../../../types'; import { mySegmentsUpdaterFactory } from '../updaters/mySegmentsUpdater'; @@ -11,7 +11,7 @@ import { mySegmentsUpdaterFactory } from '../updaters/mySegmentsUpdater'; * Creates a sync task that periodically executes a `mySegmentsUpdater` task */ export function mySegmentsSyncTaskFactory( - fetchMySegments: IFetchMySegments, + fetchMemberships: IFetchMemberships, storage: IStorageSync, readiness: IReadinessManager, settings: ISettings, @@ -21,9 +21,8 @@ export function mySegmentsSyncTaskFactory( settings.log, mySegmentsUpdaterFactory( settings.log, - mySegmentsFetcherFactory(fetchMySegments), - storage.splits, - storage.segments, + mySegmentsFetcherFactory(fetchMemberships), + storage, readiness.segments, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 4653b568..c542fec9 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -1,20 +1,21 @@ import { ISplit } from '../../dtos/types'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; +import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../streaming/types'; import { ITask, ISyncTask } from '../types'; export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit, changeNumber: number }], boolean> { } export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { } -export type MySegmentsData = string[] | { - /* segment name */ - name: string, - /* action: `true` for add, and `false` for delete 
*/ - add: boolean +export type MySegmentsData = { + type: MEMBERSHIPS_MS_UPDATE | MEMBERSHIPS_LS_UPDATE + cn: number + added: string[] + removed: string[] } -export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> { } +export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean, till?: number], boolean> { } export interface IPollingManager extends ITask { syncAll(): Promise diff --git a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts b/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts index faf31e44..b4dca3fe 100644 --- a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts +++ b/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts @@ -7,7 +7,7 @@ import { splitChangesFetcherFactory } from '../../fetchers/splitChangesFetcher'; import { splitChangesUpdaterFactory, parseSegments, computeSplitsMutation } from '../splitChangesUpdater'; import splitChangesMock1 from '../../../../__tests__/mocks/splitchanges.since.-1.json'; import fetchMock from '../../../../__tests__/testUtils/fetchMock'; -import { settingsSplitApi } from '../../../../utils/settingsValidation/__tests__/settings.mocks'; +import { fullSettings, settingsSplitApi } from '../../../../utils/settingsValidation/__tests__/settings.mocks'; import { EventEmitter } from '../../../../utils/MinEvents'; import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; import { telemetryTrackerFactory } from '../../../../trackers/telemetryTracker'; @@ -165,7 +165,7 @@ describe('splitChangesUpdater', () => { const segmentsCache = new SegmentsCacheInMemory(); const registerSegments = jest.spyOn(segmentsCache, 'registerSegments'); - const readinessManager = readinessManagerFactory(EventEmitter); + const readinessManager = readinessManagerFactory(EventEmitter, fullSettings); const splitsEmitSpy = jest.spyOn(readinessManager.splits, 'emit'); let 
splitFiltersValidation = { queryString: null, groupedFilters: { bySet: [], byName: [], byPrefix: [] }, validFilters: [] }; diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 421e0c3f..32d9f78e 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -1,13 +1,15 @@ import { IMySegmentsFetcher } from '../fetchers/types'; -import { ISegmentsCacheSync, ISplitsCacheSync } from '../../../storages/types'; +import { IStorageSync } from '../../../storages/types'; import { ISegmentsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; +import { IMembershipsResponse } from '../../../dtos/types'; +import { MEMBERSHIPS_LS_UPDATE } from '../../streaming/constants'; -type IMySegmentsUpdater = (segmentList?: string[], noCache?: boolean) => Promise +type IMySegmentsUpdater = (segmentsData?: MySegmentsData, noCache?: boolean, till?: number) => Promise /** * factory of MySegments updater, a task that: @@ -18,14 +20,14 @@ type IMySegmentsUpdater = (segmentList?: string[], noCache?: boolean) => Promise export function mySegmentsUpdaterFactory( log: ILogger, mySegmentsFetcher: IMySegmentsFetcher, - splitsCache: ISplitsCacheSync, - mySegmentsCache: ISegmentsCacheSync, + storage: IStorageSync, segmentsEventEmitter: ISegmentsEventEmitter, requestTimeoutBeforeReady: number, retriesOnFailureBeforeReady: number, matchingKey: string ): IMySegmentsUpdater { + const { splits, segments, largeSegments } = storage; let readyOnAlreadyExistentState = true; let startingUp = true; @@ -36,37 +38,31 @@ export function mySegmentsUpdaterFactory( } // @TODO if allowing pluggable storages, handle async 
execution - function updateSegments(segmentsData: MySegmentsData) { + function updateSegments(segmentsData: IMembershipsResponse | MySegmentsData) { let shouldNotifyUpdate; - if (Array.isArray(segmentsData)) { - // Update the list of segment names available - shouldNotifyUpdate = mySegmentsCache.resetSegments(segmentsData); + if ((segmentsData as MySegmentsData).type !== undefined) { + shouldNotifyUpdate = (segmentsData as MySegmentsData).type === MEMBERSHIPS_LS_UPDATE ? + largeSegments!.resetSegments(segmentsData as MySegmentsData) : + segments.resetSegments(segmentsData as MySegmentsData); } else { - // Add/Delete the segment - const { name, add } = segmentsData; - if (mySegmentsCache.isInSegment(name) !== add) { - shouldNotifyUpdate = true; - if (add) mySegmentsCache.addToSegment(name); - else mySegmentsCache.removeFromSegment(name); - } else { - shouldNotifyUpdate = false; - } + shouldNotifyUpdate = segments.resetSegments((segmentsData as IMembershipsResponse).ms || {}); + shouldNotifyUpdate = largeSegments!.resetSegments((segmentsData as IMembershipsResponse).ls || {}) || shouldNotifyUpdate; } // Notify update if required - if (splitsCache.usesSegments() && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { + if (splits.usesSegments() && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { readyOnAlreadyExistentState = false; segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED); } } - function _mySegmentsUpdater(retry: number, segmentsData?: MySegmentsData, noCache?: boolean): Promise { + function _mySegmentsUpdater(retry: number, segmentsData?: MySegmentsData, noCache?: boolean, till?: number): Promise { const updaterPromise: Promise = segmentsData ? 
// If segmentsData is provided, there is no need to fetch mySegments new Promise((res) => { updateSegments(segmentsData); res(true); }) : // If not provided, fetch mySegments - mySegmentsFetcher(matchingKey, noCache, _promiseDecorator).then(segments => { + mySegmentsFetcher(matchingKey, noCache, till, _promiseDecorator).then(segments => { // Only when we have downloaded segments completely, we should not keep retrying anymore startingUp = false; @@ -91,14 +87,15 @@ export function mySegmentsUpdaterFactory( * MySegments updater returns a promise that resolves with a `false` boolean value if it fails to fetch mySegments or synchronize them with the storage. * Returned promise will not be rejected. * - * @param {SegmentsData | undefined} segmentsData it can be: + * @param segmentsData - it can be: * (1) the list of mySegments names to sync in the storage, * (2) an object with a segment name and action (true: add, or false: delete) to update the storage, * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage. 
- * @param {boolean | undefined} noCache true to revalidate data to fetch + * @param noCache - true to revalidate data to fetch + * @param till - query param to bypass CDN requests */ - return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean) { - return _mySegmentsUpdater(0, segmentsData, noCache); + return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean, till?: number) { + return _mySegmentsUpdater(0, segmentsData, noCache, till); }; } diff --git a/src/sync/polling/updaters/segmentChangesUpdater.ts b/src/sync/polling/updaters/segmentChangesUpdater.ts index 39d147ff..5f9db114 100644 --- a/src/sync/polling/updaters/segmentChangesUpdater.ts +++ b/src/sync/polling/updaters/segmentChangesUpdater.ts @@ -1,12 +1,9 @@ import { ISegmentChangesFetcher } from '../fetchers/types'; import { ISegmentsCacheBase } from '../../../storages/types'; import { IReadinessManager } from '../../../readiness/types'; -import { MaybeThenable } from '../../../dtos/types'; -import { findIndex } from '../../../utils/lang'; import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { LOG_PREFIX_INSTANTIATION, LOG_PREFIX_SYNC_SEGMENTS } from '../../../logger/constants'; -import { thenable } from '../../../utils/promise/thenable'; type ISegmentChangesUpdater = (fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number) => Promise @@ -16,10 +13,10 @@ type ISegmentChangesUpdater = (fetchOnlyNew?: boolean, segmentName?: string, noC * - updates `segmentsCache` * - uses `segmentsEventEmitter` to emit events related to segments data updates * - * @param log logger instance - * @param segmentChangesFetcher fetcher of `/segmentChanges` - * @param segments segments storage, with sync or async methods - * @param readiness optional readiness manager. Not required for synchronizer or producer mode. 
+ * @param log - logger instance + * @param segmentChangesFetcher - fetcher of `/segmentChanges` + * @param segments - segments storage, with sync or async methods + * @param readiness - optional readiness manager. Not required for synchronizer or producer mode. */ export function segmentChangesUpdaterFactory( log: ILogger, @@ -30,31 +27,22 @@ export function segmentChangesUpdaterFactory( let readyOnAlreadyExistentState = true; - function updateSegment(segmentName: string, noCache?: boolean, till?: number, fetchOnlyNew?: boolean) { + function updateSegment(segmentName: string, noCache?: boolean, till?: number, fetchOnlyNew?: boolean): Promise { log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processing segment ${segmentName}`); let sincePromise = Promise.resolve(segments.getChangeNumber(segmentName)); return sincePromise.then(since => { // if fetchOnlyNew flag, avoid processing already fetched segments - if (fetchOnlyNew && since !== -1) return -1; - - return segmentChangesFetcher(since, segmentName, noCache, till).then(function (changes) { - let changeNumber = -1; - const results: MaybeThenable[] = []; - changes.forEach(x => { - if (x.added.length > 0) results.push(segments.addToSegment(segmentName, x.added)); - if (x.removed.length > 0) results.push(segments.removeFromSegment(segmentName, x.removed)); - if (x.added.length > 0 || x.removed.length > 0) { - results.push(segments.setChangeNumber(segmentName, x.till)); - changeNumber = x.till; - } - - log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processed ${segmentName} with till = ${x.till}. Added: ${x.added.length}. Removed: ${x.removed.length}`); + return fetchOnlyNew && since !== -1 ? + false : + segmentChangesFetcher(since, segmentName, noCache, till).then((changes) => { + return Promise.all(changes.map(x => { + log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processing ${segmentName} with till = ${x.till}. Added: ${x.added.length}. 
Removed: ${x.removed.length}`); + return segments.update(segmentName, x.added, x.removed, x.till); + })).then((updates) => { + return updates.some(update => update); + }); }); - // If at least one storage operation result is a promise, join all in a single promise. - if (results.some(result => thenable(result))) return Promise.all(results).then(() => changeNumber); - return changeNumber; - }); }); } /** @@ -62,11 +50,11 @@ export function segmentChangesUpdaterFactory( * Thus, a false result doesn't imply that SDK_SEGMENTS_ARRIVED was not emitted. * Returned promise will not be rejected. * - * @param {boolean | undefined} fetchOnlyNew if true, only fetch the segments that not exists, i.e., which `changeNumber` is equal to -1. + * @param fetchOnlyNew - if true, only fetch the segments that do not exist, i.e., which `changeNumber` is equal to -1. * This param is used by SplitUpdateWorker on server-side SDK, to fetch new registered segments on SPLIT_UPDATE notifications. - * @param {string | undefined} segmentName segment name to fetch. By passing `undefined` it fetches the list of segments registered at the storage - * @param {boolean | undefined} noCache true to revalidate data to fetch on a SEGMENT_UPDATE notifications. - * @param {number | undefined} till till target for the provided segmentName, for CDN bypass. + * @param segmentName - segment name to fetch. By passing `undefined` it fetches the list of segments registered at the storage + * @param noCache - true to revalidate data to fetch on SEGMENT_UPDATE notifications. + * @param till - till target for the provided segmentName, for CDN bypass. */ return function segmentChangesUpdater(fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number) { log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Started segments update`); @@ -75,16 +63,12 @@ export function segmentChangesUpdaterFactory( let segmentsPromise = Promise.resolve(segmentName ?
[segmentName] : segments.getRegisteredSegments()); return segmentsPromise.then(segmentNames => { - // Async fetchers are collected here. - const updaters: Promise[] = []; - - for (let index = 0; index < segmentNames.length; index++) { - updaters.push(updateSegment(segmentNames[index], noCache, till, fetchOnlyNew)); - } + // Async fetchers + const updaters = segmentNames.map(segmentName => updateSegment(segmentName, noCache, till, fetchOnlyNew)); return Promise.all(updaters).then(shouldUpdateFlags => { // if at least one segment fetch succeeded, mark segments ready - if (findIndex(shouldUpdateFlags, v => v !== -1) !== -1 || readyOnAlreadyExistentState) { + if (shouldUpdateFlags.some(update => update) || readyOnAlreadyExistentState) { readyOnAlreadyExistentState = false; if (readiness) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); } diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/splitChangesUpdater.ts index 669a2010..bf8803a2 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/splitChangesUpdater.ts @@ -1,4 +1,3 @@ -import { _Set, setToArray, ISet } from '../../../utils/lang/sets'; import { ISegmentsCacheBase, ISplitsCacheBase } from '../../../storages/types'; import { ISplitChangesFetcher } from '../fetchers/types'; import { ISplit, ISplitChangesResponse, ISplitFiltersValidation } from '../../../dtos/types'; @@ -9,6 +8,7 @@ import { ILogger } from '../../../logger/types'; import { SYNC_SPLITS_FETCH, SYNC_SPLITS_NEW, SYNC_SPLITS_REMOVED, SYNC_SPLITS_SEGMENTS, SYNC_SPLITS_FETCH_FAILS, SYNC_SPLITS_FETCH_RETRY } from '../../../logger/constants'; import { startsWith } from '../../../utils/lang'; import { IN_SEGMENT } from '../../../utils/constants'; +import { setToArray } from '../../../utils/lang/sets'; type ISplitChangesUpdater = (noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit, changeNumber: number }) => Promise @@ -27,8 +27,8 @@ function 
checkAllSegmentsExist(segments: ISegmentsCacheBase): Promise { * Collect segments from a raw split definition. * Exported for testing purposes. */ -export function parseSegments({ conditions }: ISplit): ISet { - let segments = new _Set(); +export function parseSegments({ conditions }: ISplit): Set { + let segments = new Set(); for (let i = 0; i < conditions.length; i++) { const matchers = conditions[i].matcherGroup.matchers; @@ -51,8 +51,8 @@ interface ISplitMutations { * If there are defined filters and one feature flag doesn't match with them, its status is changed to 'ARCHIVE' to avoid storing it * If there are set filter defined, names filter is ignored * - * @param featureFlag feature flag to be evaluated - * @param filters splitFiltersValidation bySet | byName + * @param featureFlag - feature flag to be evaluated + * @param filters - splitFiltersValidation bySet | byName */ function matchFilters(featureFlag: ISplit, filters: ISplitFiltersValidation) { const { bySet: setsFilter, byName: namesFilter, byPrefix: prefixFilter } = filters.groupedFilters; @@ -74,7 +74,7 @@ function matchFilters(featureFlag: ISplit, filters: ISplitFiltersValidation) { * Exported for testing purposes. 
*/ export function computeSplitsMutation(entries: ISplit[], filters: ISplitFiltersValidation): ISplitMutations { - const segments = new _Set(); + const segments = new Set(); const computed = entries.reduce((accum, split) => { if (split.status === 'ACTIVE' && matchFilters(split, filters)) { accum.added.push([split.name, split]); @@ -100,13 +100,13 @@ export function computeSplitsMutation(entries: ISplit[], filters: ISplitFiltersV * - updates `splitsCache` * - uses `splitsEventEmitter` to emit events related to split data updates * - * @param log Logger instance - * @param splitChangesFetcher Fetcher of `/splitChanges` - * @param splits Splits storage, with sync or async methods - * @param segments Segments storage, with sync or async methods - * @param splitsEventEmitter Optional readiness manager. Not required for synchronizer or producer mode. - * @param requestTimeoutBeforeReady How long the updater will wait for the request to timeout. Default 0, i.e., never timeout. - * @param retriesOnFailureBeforeReady How many retries on `/splitChanges` we the updater do in case of failure or timeout. Default 0, i.e., no retries. + * @param log - Logger instance + * @param splitChangesFetcher - Fetcher of `/splitChanges` + * @param splits - Splits storage, with sync or async methods + * @param segments - Segments storage, with sync or async methods + * @param splitsEventEmitter - Optional readiness manager. Not required for synchronizer or producer mode. + * @param requestTimeoutBeforeReady - How long the updater will wait for the request to timeout. Default 0, i.e., never timeout. + * @param retriesOnFailureBeforeReady - How many retries on `/splitChanges` the updater will do in case of failure or timeout. Default 0, i.e., no retries.
*/ export function splitChangesUpdaterFactory( log: ILogger, @@ -142,14 +142,14 @@ export function splitChangesUpdaterFactory( * SplitChanges updater returns a promise that resolves with a `false` boolean value if it fails to fetch splits or synchronize them with the storage. * Returned promise will not be rejected. * - * @param {boolean | undefined} noCache true to revalidate data to fetch - * @param {boolean | undefined} till query param to bypass CDN requests + * @param noCache - true to revalidate data to fetch + * @param till - query param to bypass CDN requests */ return function splitChangesUpdater(noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit, changeNumber: number }) { /** - * @param {number} since current changeNumber at splitsCache - * @param {number} retry current number of retry attempts + * @param since - current changeNumber at splitsCache + * @param retry - current number of retry attempts */ function _splitChangesUpdater(since: number, retry = 0): Promise { log.debug(SYNC_SPLITS_FETCH, [since]); diff --git a/src/sync/streaming/AuthClient/index.ts b/src/sync/streaming/AuthClient/index.ts index b8d81c55..8869674d 100644 --- a/src/sync/streaming/AuthClient/index.ts +++ b/src/sync/streaming/AuthClient/index.ts @@ -8,13 +8,13 @@ import { hash } from '../../../utils/murmur3/murmur3'; /** * Factory of authentication function. * - * @param fetchAuth `SplitAPI.fetchAuth` endpoint + * @param fetchAuth - `SplitAPI.fetchAuth` endpoint */ export function authenticateFactory(fetchAuth: IFetchAuth): IAuthenticate { /** * Run authentication requests to Auth Server, and returns a promise that resolves with the decoded JTW token. - * @param {string[] | undefined} userKeys set of user Keys to track MY_SEGMENTS_CHANGES. It is undefined for server-side API. + * @param userKeys - set of user Keys to track membership updates. It is undefined for server-side API. 
*/ return function authenticate(userKeys?: string[]): Promise { return fetchAuth(userKeys) diff --git a/src/sync/streaming/SSEClient/__tests__/index.spec.ts b/src/sync/streaming/SSEClient/__tests__/index.spec.ts index 7564c645..675e8c5a 100644 --- a/src/sync/streaming/SSEClient/__tests__/index.spec.ts +++ b/src/sync/streaming/SSEClient/__tests__/index.spec.ts @@ -19,18 +19,18 @@ const EXPECTED_URL = url(settings, '/sse') + const EXPECTED_BROWSER_URL = EXPECTED_URL + `&SplitSDKVersion=${settings.version}&SplitSDKClientKey=${EXPECTED_HEADERS.SplitSDKClientKey}`; -test('SSClient / instance creation throws error if EventSource is not provided', () => { +test('SSEClient / instance creation throws error if EventSource is not provided', () => { expect(() => { new SSEClient(settings); }).toThrow(Error); expect(() => { new SSEClient(settings, {}); }).toThrow(Error); expect(() => { new SSEClient(settings, { getEventSource: () => undefined }); }).toThrow(Error); }); -test('SSClient / instance creation success if EventSource is provided', () => { +test('SSEClient / instance creation success if EventSource is provided', () => { const instance = new SSEClient(settings, { getEventSource: () => EventSourceMock }); expect(instance.eventSource).toBe(EventSourceMock); }); -test('SSClient / setEventHandler, open and close methods', () => { +test('SSEClient / setEventHandler, open and close methods', () => { // instance event handler const handler = { handleOpen: jest.fn(), @@ -83,7 +83,7 @@ test('SSClient / setEventHandler, open and close methods', () => { }); -describe('SSClient / open method on client-side', () => { +describe('SSEClient / open method on client-side', () => { test('metadata as query params', () => { @@ -123,7 +123,7 @@ describe('SSClient / open method on client-side', () => { }); -describe('SSClient / open method on server-side', () => { +describe('SSEClient / open method on server-side', () => { test('metadata as headers', () => { diff --git 
a/src/sync/streaming/SSEClient/index.ts b/src/sync/streaming/SSEClient/index.ts index da5d8648..c19c2817 100644 --- a/src/sync/streaming/SSEClient/index.ts +++ b/src/sync/streaming/SSEClient/index.ts @@ -14,8 +14,8 @@ const CONTROL_CHANNEL_REGEX = /^control_/; /** * Build metadata headers for SSE connection. * - * @param {ISettings} settings Validated settings. - * @returns {Record} Headers object + * @param settings - Validated settings. + * @returns Headers object */ function buildSSEHeaders(settings: ISettings) { const headers: Record = { @@ -45,8 +45,8 @@ export class SSEClient implements ISSEClient { /** * SSEClient constructor. * - * @param settings Validated settings. - * @param platform object containing environment-specific dependencies + * @param settings - Validated settings. + * @param platform - object containing environment-specific dependencies * @throws 'EventSource API is not available.' if EventSource is not available. */ constructor(private settings: ISettings, { getEventSource, getOptions }: IPlatform) { @@ -64,19 +64,14 @@ export class SSEClient implements ISSEClient { /** * Open the connection with a given authToken - * - * @param {IAuthTokenPushEnabled} authToken - * @throws {TypeError} Will throw an error if `authToken` is undefined */ open(authToken: IAuthTokenPushEnabled) { this.close(); // it closes connection if previously opened - const channelsQueryParam = Object.keys(authToken.channels).map( - function (channel) { - const params = CONTROL_CHANNEL_REGEX.test(channel) ? '[?occupancy=metrics.publishers]' : ''; - return encodeURIComponent(params + channel); - } - ).join(','); + const channelsQueryParam = Object.keys(authToken.channels).map((channel) => { + const params = CONTROL_CHANNEL_REGEX.test(channel) ? 
'[?occupancy=metrics.publishers]' : ''; + return encodeURIComponent(params + channel); + }).join(','); const url = `${this.settings.urls.streaming}/sse?channels=${channelsQueryParam}&accessToken=${authToken.token}&v=${ABLY_API_VERSION}&heartbeats=true`; // same results using `&heartbeats=false` const isServerSide = !this.settings.core.key; diff --git a/src/sync/streaming/SSEHandler/NotificationKeeper.ts b/src/sync/streaming/SSEHandler/NotificationKeeper.ts index a07c6761..8b40ae6d 100644 --- a/src/sync/streaming/SSEHandler/NotificationKeeper.ts +++ b/src/sync/streaming/SSEHandler/NotificationKeeper.ts @@ -10,7 +10,7 @@ const STREAMING_EVENT_TYPES: StreamingEventType[] = [OCCUPANCY_PRI, OCCUPANCY_SE /** * Factory of notification keeper, which process OCCUPANCY and CONTROL notifications and emits the corresponding push events. * - * @param pushEmitter emitter for events related to streaming support + * @param pushEmitter - emitter for events related to streaming support */ // @TODO update logic to handle OCCUPANCY for any region and rename according to new spec (e.g.: PUSH_SUBSYSTEM_UP --> PUSH_SUBSYSTEM_UP) export function notificationKeeperFactory(pushEmitter: IPushEventEmitter, telemetryTracker: ITelemetryTracker) { diff --git a/src/sync/streaming/SSEHandler/NotificationParser.ts b/src/sync/streaming/SSEHandler/NotificationParser.ts index 749fc332..e333a9f3 100644 --- a/src/sync/streaming/SSEHandler/NotificationParser.ts +++ b/src/sync/streaming/SSEHandler/NotificationParser.ts @@ -7,8 +7,9 @@ import { INotificationMessage, INotificationError } from './types'; * HTTP errors handled by Ably (e.g., 400 due to invalid token, 401 due to expired token, 500) have the `data` property. * Other network and HTTP errors do not have this property. 
* - * @param {Object} error - * @throws {SyntaxError} if `error.data` is an invalid JSON string + * @param error - The error event to parse + * @returns parsed notification error + * @throws SyntaxError if `error.data` is an invalid JSON string */ export function errorParser(error: Event): INotificationError { // @ts-ignore @@ -21,10 +22,10 @@ export function errorParser(error: Event): INotificationError { * Parses the `data` JSON string of a given SSE message notifications. * Also assigns the type OCCUPANCY, if it corresponds, so that all supported messages (e.g., SPLIT_UPDATE, CONTROL) have a type. * - * @param message + * @param message - The message event to parse * @returns parsed notification message or undefined if the given event data is falsy (e.g, '' or undefined). * For example, the EventSource implementation of React-Native for iOS emits a message event with empty data for Ably keepalive comments. - * @throws {SyntaxError} if `message.data` or `JSON.parse(message.data).data` are invalid JSON strings + * @throws SyntaxError if `message.data` or `JSON.parse(message.data).data` are invalid JSON strings */ export function messageParser(message: MessageEvent): INotificationMessage | undefined { if (!message.data) return; diff --git a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts index 9651d232..e85b22d8 100644 --- a/src/sync/streaming/SSEHandler/__tests__/index.spec.ts +++ b/src/sync/streaming/SSEHandler/__tests__/index.spec.ts @@ -1,21 +1,24 @@ // @ts-nocheck import { SSEHandlerFactory } from '..'; -import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, MY_SEGMENTS_UPDATE, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_SEGMENTS_UPDATE_V2, ControlType } from '../../constants'; +import { PUSH_SUBSYSTEM_UP, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, PUSH_RETRYABLE_ERROR, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MEMBERSHIPS_MS_UPDATE, 
MEMBERSHIPS_LS_UPDATE, ControlType } from '../../constants'; import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; // update messages import splitUpdateMessage from '../../../../__tests__/mocks/message.SPLIT_UPDATE.1457552620999.json'; import splitKillMessage from '../../../../__tests__/mocks/message.SPLIT_KILL.1457552650000.json'; import segmentUpdateMessage from '../../../../__tests__/mocks/message.SEGMENT_UPDATE.1457552640000.json'; -import mySegmentsUpdateMessage from '../../../../__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json'; -// update messages MY_SEGMENTS_UPDATE_V2 -import unboundedMessage from '../../../../__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json'; -import boundedGzipMessage from '../../../../__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json'; -import keylistGzipMessage from '../../../../__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json'; -import segmentRemovalMessage from '../../../../__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json'; +// update messages MEMBERSHIPS_MS_UPDATE +import unboundedMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json'; +import boundedGzipMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json'; +import keylistGzipMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json'; +import segmentRemovalMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; import { keylists, bitmaps } from '../../__tests__/dataMocks'; +// update messages MEMBERSHIPS_LS_UPDATE +import largeSegmentUnboundedMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.1457552650000.json'; +import largeSegmentRemovalMessage from '../../../../__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; + // occupancy messages 
import occupancy1ControlPri from '../../../../__tests__/mocks/message.OCCUPANCY.1.control_pri.1586987434450.json'; import occupancy0ControlPri from '../../../../__tests__/mocks/message.OCCUPANCY.0.control_pri.1586987434550.json'; @@ -149,25 +152,29 @@ test('`handlerMessage` for update notifications (NotificationProcessor) and stre sseHandler.handleMessage(segmentUpdateMessage); expect(pushEmitter.emit).toHaveBeenLastCalledWith(SEGMENT_UPDATE, ...expectedParams); // must emit SEGMENT_UPDATE with the message change number and segment name - expectedParams = [{ type: MY_SEGMENTS_UPDATE, changeNumber: 1457552640000, includesPayload: false }, 'NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_NTcwOTc3MDQx_mySegments']; - sseHandler.handleMessage(mySegmentsUpdateMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE, ...expectedParams); // must emit MY_SEGMENTS_UPDATE with the message parsed data and channel - - expectedParams = [{ type: 'MY_SEGMENTS_UPDATE_V2', changeNumber: 1457552650000, c: 0, d: '', u: 0, segmentName: '' }]; + expectedParams = [{ type: 'MEMBERSHIPS_MS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [] }]; sseHandler.handleMessage(unboundedMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_MS_UPDATE with the message parsed data - expectedParams = [{ type: 'MY_SEGMENTS_UPDATE_V2', changeNumber: 1457552651000, c: 1, d: bitmaps[0].bitmapDataCompressed, u: 1, segmentName: '' }]; + expectedParams = [{ type: 'MEMBERSHIPS_MS_UPDATE', cn: 1457552651000, c: 1, d: bitmaps[0].bitmapDataCompressed, u: 1, l: [] }]; sseHandler.handleMessage(boundedGzipMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + 
expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_MS_UPDATE with the message parsed data - expectedParams = [{ type: 'MY_SEGMENTS_UPDATE_V2', changeNumber: 1457552652000, c: 1, d: keylists[0].keyListDataCompressed, u: 2, segmentName: 'splitters' }]; + expectedParams = [{ type: 'MEMBERSHIPS_MS_UPDATE', cn: 1457552652000, c: 1, d: keylists[0].keyListDataCompressed, u: 2, l: ['splitters'] }]; sseHandler.handleMessage(keylistGzipMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_MS_UPDATE with the message parsed data - expectedParams = [{ type: 'MY_SEGMENTS_UPDATE_V2', changeNumber: 1457552653000, c: 0, d: '', u: 3, segmentName: 'splitters' }]; + expectedParams = [{ type: 'MEMBERSHIPS_MS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 3, l: ['splitters'] }]; sseHandler.handleMessage(segmentRemovalMessage); - expect(pushEmitter.emit).toHaveBeenLastCalledWith(MY_SEGMENTS_UPDATE_V2, ...expectedParams); // must emit MY_SEGMENTS_UPDATE_V2 with the message parsed data + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_MS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_MS_UPDATE with the message parsed data + + expectedParams = [{ type: 'MEMBERSHIPS_LS_UPDATE', cn: 1457552650000, c: 0, d: '', u: 0, l: [], i: 300, h: 1, s: 0 }]; + sseHandler.handleMessage(largeSegmentUnboundedMessage); + expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_LS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_LS_UPDATE with the message parsed data + + expectedParams = [{ type: 'MEMBERSHIPS_LS_UPDATE', cn: 1457552653000, c: 0, d: '', u: 3, l: ['employees'] }]; + sseHandler.handleMessage(largeSegmentRemovalMessage); + 
expect(pushEmitter.emit).toHaveBeenLastCalledWith(MEMBERSHIPS_LS_UPDATE, ...expectedParams); // must emit MEMBERSHIPS_LS_UPDATE with the message parsed data sseHandler.handleMessage(streamingReset); expect(pushEmitter.emit).toHaveBeenLastCalledWith(ControlType.STREAMING_RESET); // must emit STREAMING_RESET diff --git a/src/sync/streaming/SSEHandler/index.ts b/src/sync/streaming/SSEHandler/index.ts index d8f20b32..fbbe329c 100644 --- a/src/sync/streaming/SSEHandler/index.ts +++ b/src/sync/streaming/SSEHandler/index.ts @@ -1,6 +1,6 @@ import { errorParser, messageParser } from './NotificationParser'; import { notificationKeeperFactory } from './NotificationKeeper'; -import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE } from '../constants'; +import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MEMBERSHIPS_MS_UPDATE, MEMBERSHIPS_LS_UPDATE } from '../constants'; import { IPushEventEmitter } from '../types'; import { ISseEventHandler } from '../SSEClient/types'; import { INotificationError, INotificationMessage } from './types'; @@ -12,8 +12,8 @@ import { ITelemetryTracker } from '../../../trackers/types'; /** * Factory for SSEHandler, which processes SSEClient messages and emits the corresponding push events. 
* - * @param log factory logger - * @param pushEmitter emitter for events related to streaming support + * @param log - factory logger + * @param pushEmitter - emitter for events related to streaming support */ export function SSEHandlerFactory(log: ILogger, pushEmitter: IPushEventEmitter, telemetryTracker: ITelemetryTracker): ISseEventHandler { @@ -74,21 +74,18 @@ export function SSEHandlerFactory(log: ILogger, pushEmitter: IPushEventEmitter, const { parsedData, data, channel, timestamp } = messageWithParsedData; log.debug(STREAMING_NEW_MESSAGE, [data]); - // we only handle update events if streaming is up. - if (!notificationKeeper.isStreamingUp() && [OCCUPANCY, CONTROL].indexOf(parsedData.type) === -1) - return; + // we only handle update events if streaming is up + if (!notificationKeeper.isStreamingUp() && [OCCUPANCY, CONTROL].indexOf(parsedData.type) === -1) return; switch (parsedData.type) { /* update events */ case SPLIT_UPDATE: case SEGMENT_UPDATE: - case MY_SEGMENTS_UPDATE_V2: + case MEMBERSHIPS_MS_UPDATE: + case MEMBERSHIPS_LS_UPDATE: case SPLIT_KILL: pushEmitter.emit(parsedData.type, parsedData); break; - case MY_SEGMENTS_UPDATE: - pushEmitter.emit(parsedData.type, parsedData, channel); - break; /* occupancy & control events, handled by NotificationManagerKeeper */ case OCCUPANCY: diff --git a/src/sync/streaming/SSEHandler/types.ts b/src/sync/streaming/SSEHandler/types.ts index d794322c..192583c3 100644 --- a/src/sync/streaming/SSEHandler/types.ts +++ b/src/sync/streaming/SSEHandler/types.ts @@ -1,12 +1,5 @@ import { ControlType } from '../constants'; -import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY } from '../types'; - -export interface IMySegmentsUpdateData { - type: MY_SEGMENTS_UPDATE, - changeNumber: number, - includesPayload: boolean, - segmentList?: string[] -} +import { SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from 
'../types'; export enum Compression { None = 0, @@ -26,15 +19,22 @@ export interface KeyList { r?: string[], // decimal hash64 of user keys } -export interface IMySegmentsUpdateV2Data { - type: MY_SEGMENTS_UPDATE_V2, - changeNumber: number, - segmentName: string, - c: Compression, - d: string, +interface IMembershipUpdateData { + type: T, + cn: number, + n?: string[], + c?: Compression, + d?: string, u: UpdateStrategy, + i?: number, // time interval in millis + h?: number, // hash function + s?: number, // seed for hash function } +export interface IMembershipMSUpdateData extends IMembershipUpdateData { } + +export interface IMembershipLSUpdateData extends IMembershipUpdateData { } + export interface ISegmentUpdateData { type: SEGMENT_UPDATE, changeNumber: number, @@ -68,6 +68,6 @@ export interface IOccupancyData { } } -export type INotificationData = IMySegmentsUpdateData | IMySegmentsUpdateV2Data | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData +export type INotificationData = IMembershipMSUpdateData | IMembershipLSUpdateData | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData export type INotificationMessage = { parsedData: INotificationData, channel: string, timestamp: number, data: string } export type INotificationError = Event & { parsedData?: any, message?: string } diff --git a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts index eb1a25b1..bafdd37d 100644 --- a/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts @@ -1,66 +1,133 @@ import { IMySegmentsSyncTask, MySegmentsData } from '../../polling/types'; import { Backoff } from '../../../utils/Backoff'; import { IUpdateWorker } from './types'; -import { MY_SEGMENT } from '../../../utils/constants'; import { ITelemetryTracker } from '../../../trackers/types'; +import { MEMBERSHIPS } from 
'../../../utils/constants'; +import { ISegmentsCacheSync, IStorageSync } from '../../../storages/types'; +import { ILogger } from '../../../logger/types'; +import { FETCH_BACKOFF_MAX_RETRIES } from './constants'; +import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../constants'; /** * MySegmentsUpdateWorker factory */ -export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker { - - let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events - let currentChangeNumber = -1; - let handleNewEvent = false; - let isHandlingEvent: boolean; - let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber - const backoff = new Backoff(__handleMySegmentsUpdateCall); - - function __handleMySegmentsUpdateCall() { - isHandlingEvent = true; - if (maxChangeNumber > currentChangeNumber) { - handleNewEvent = false; - const currentMaxChangeNumber = maxChangeNumber; - - // fetch mySegments revalidating data if cached - mySegmentsSyncTask.execute(_segmentsData, true).then((result) => { - if (!isHandlingEvent) return; // halt if `stop` has been called - if (result !== false) {// Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value. - if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MY_SEGMENT); - currentChangeNumber = Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. 
- } - if (handleNewEvent) { - __handleMySegmentsUpdateCall(); - } else { - backoff.scheduleCall(); - } - }); - } else { - isHandlingEvent = false; +export function MySegmentsUpdateWorker(log: ILogger, storage: Pick, mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker<[mySegmentsData?: Pick, payload?: Pick, delay?: number]> { + + let _delay: undefined | number; + let _delayTimeoutID: any; + + function createUpdateWorker(mySegmentsCache: ISegmentsCacheSync) { + + let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events + let currentChangeNumber = -1; + let handleNewEvent = false; + let isHandlingEvent: boolean; + let cdnBypass: boolean; + let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber + const backoff = new Backoff(__handleMySegmentsUpdateCall); + + function __handleMySegmentsUpdateCall() { + isHandlingEvent = true; + if (maxChangeNumber > Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber())) { + handleNewEvent = false; + const currentMaxChangeNumber = maxChangeNumber; + + // fetch mySegments revalidating data if cached + const syncTask = _delay ? + new Promise(res => { + _delayTimeoutID = setTimeout(() => { + _delay = undefined; + mySegmentsSyncTask.execute(_segmentsData, true, cdnBypass ? maxChangeNumber : undefined).then(res); + }, _delay); + }) : + mySegmentsSyncTask.execute(_segmentsData, true, cdnBypass ? maxChangeNumber : undefined); + + syncTask.then((result) => { + if (!isHandlingEvent) return; // halt if `stop` has been called + if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, `mySegmentsCache.getChangeNumber` can be -1, since `/memberships` change number is optional + const storageChangeNumber = mySegmentsCache.getChangeNumber(); + currentChangeNumber = storageChangeNumber > -1 ? 
+ storageChangeNumber : + Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch. + } + if (handleNewEvent) { + __handleMySegmentsUpdateCall(); + } else { + if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(MEMBERSHIPS); + + const attempts = backoff.attempts + 1; + + if (maxChangeNumber <= currentChangeNumber) { + log.debug(`Refresh completed${cdnBypass ? ' bypassing the CDN' : ''} in ${attempts} attempts.`); + isHandlingEvent = false; + return; + } + + if (attempts < FETCH_BACKOFF_MAX_RETRIES) { + backoff.scheduleCall(); + return; + } + + if (cdnBypass) { + log.debug(`No changes fetched after ${attempts} attempts with CDN bypassed.`); + isHandlingEvent = false; + } else { + backoff.reset(); + cdnBypass = true; + __handleMySegmentsUpdateCall(); + } + } + }); + } else { + isHandlingEvent = false; + } } + + return { + /** + * Invoked by NotificationProcessor on MEMBERSHIPS_MS_UPDATE and MEMBERSHIPS_LS_UPDATE notifications + * + * @param mySegmentsData - notification type and change number + * @param payload - data for KeyList or SegmentRemoval instant updates + * @param delay - optional time to wait for BoundedFetchRequest updates + */ + put(mySegmentsData: Pick, payload?: Pick, delay?: number) { + const { type, cn } = mySegmentsData; + // Discard event if it is outdated or there is a pending fetch request (_delay is set), but update target change number + if (cn <= Math.max(currentChangeNumber, mySegmentsCache.getChangeNumber()) || cn <= maxChangeNumber) return; + maxChangeNumber = cn; + if (_delay) return; + + handleNewEvent = true; + cdnBypass = false; + _segmentsData = payload && { type, cn, added: payload.added, removed: payload.removed }; + _delay = delay; + + if (backoff.timeoutID || !isHandlingEvent) __handleMySegmentsUpdateCall(); + backoff.reset(); + }, + + stop() { + clearTimeout(_delayTimeoutID); + _delay = undefined; + isHandlingEvent = false; + 
backoff.reset(); + } + }; } + const updateWorkers = { + [MEMBERSHIPS_MS_UPDATE]: createUpdateWorker(storage.segments), + [MEMBERSHIPS_LS_UPDATE]: createUpdateWorker(storage.largeSegments!), + }; + return { - /** - * Invoked by NotificationProcessor on MY_SEGMENTS_UPDATE event - * - * @param {number} changeNumber change number of the MY_SEGMENTS_UPDATE notification - * @param {SegmentsData | undefined} segmentsData might be undefined - */ - put(changeNumber: number, segmentsData?: MySegmentsData) { - if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber) return; - - maxChangeNumber = changeNumber; - handleNewEvent = true; - _segmentsData = segmentsData; - - if (backoff.timeoutID || !isHandlingEvent) __handleMySegmentsUpdateCall(); - backoff.reset(); + put(mySegmentsData: Pick, payload?: Pick, delay?: number) { + updateWorkers[mySegmentsData.type].put(mySegmentsData, payload, delay); }, - stop() { - isHandlingEvent = false; - backoff.reset(); + updateWorkers[MEMBERSHIPS_MS_UPDATE].stop(); + updateWorkers[MEMBERSHIPS_LS_UPDATE].stop(); } }; } diff --git a/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts index 93454f0e..956b744a 100644 --- a/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts @@ -9,7 +9,7 @@ import { IUpdateWorker } from './types'; /** * SegmentsUpdateWorker factory */ -export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker { +export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker<[ISegmentUpdateData]> { // Handles retries with CDN bypass per segment name function SegmentUpdateWorker(segment: string) { @@ -84,8 +84,8 @@ export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSy /** * Invoked by 
NotificationProcessor on SEGMENT_UPDATE event * - * @param {number} changeNumber change number of the SEGMENT_UPDATE notification - * @param {string} segmentName segment name of the SEGMENT_UPDATE notification + * @param changeNumber - change number of the SEGMENT_UPDATE notification + * @param segmentName - segment name of the SEGMENT_UPDATE notification */ put({ changeNumber, segmentName }: ISegmentUpdateData) { if (!segments[segmentName]) segments[segmentName] = SegmentUpdateWorker(segmentName); diff --git a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts index e9336aba..580fe9cb 100644 --- a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts @@ -14,7 +14,7 @@ import { IUpdateWorker } from './types'; /** * SplitsUpdateWorker factory */ -export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker & { killSplit(event: ISplitKillData): void } { +export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData, payload?: ISplit]> & { killSplit(event: ISplitKillData): void } { let maxChangeNumber = 0; let handleNewEvent = false; @@ -69,7 +69,7 @@ export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, /** * Invoked by NotificationProcessor on SPLIT_UPDATE event * - * @param {number} changeNumber change number of the SPLIT_UPDATE notification + * @param changeNumber - change number of the SPLIT_UPDATE notification */ function put({ changeNumber, pcn }: ISplitUpdateData, _payload?: ISplit) { const currentChangeNumber = 
splitsCache.getChangeNumber(); @@ -94,9 +94,9 @@ export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, /** * Invoked by NotificationProcessor on SPLIT_KILL event * - * @param {number} changeNumber change number of the SPLIT_UPDATE notification - * @param {string} splitName name of split to kill - * @param {string} defaultTreatment default treatment value + * @param changeNumber - change number of the SPLIT_KILL notification + * @param splitName - name of split to kill + * @param defaultTreatment - default treatment value */ killSplit({ changeNumber, splitName, defaultTreatment }: ISplitKillData) { if (splitsCache.killLocally(splitName, defaultTreatment, changeNumber)) { diff --git a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts index 6726ed86..0ba329bb 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/MySegmentsUpdateWorker.spec.ts @@ -1,18 +1,37 @@ -// @ts-nocheck import { MySegmentsUpdateWorker } from '../MySegmentsUpdateWorker'; +import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; import { syncTaskFactory } from '../../../syncTask'; import { Backoff } from '../../../../utils/Backoff'; import { telemetryTrackerFactory } from '../../../../trackers/telemetryTracker'; +import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../../constants'; -function mySegmentsSyncTaskMock(values = []) { +function storageMock() { + return { + segments: { + _changeNumber: -1, + getChangeNumber() { + return this._changeNumber; + } + }, + largeSegments: { + _changeNumber: -1, + getChangeNumber() { + return this._changeNumber; + } + }, + }; +} + +function mySegmentsSyncTaskMock(values: Array = []) { - const __mySegmentsUpdaterCalls = []; + const __mySegmentsUpdaterCalls: Array<{ res: (value?: boolean) => void }> = []; - function 
__resolveMySegmentsUpdaterCall(value) { - if (__mySegmentsUpdaterCalls.length) __mySegmentsUpdaterCalls.shift().res(value); // resolve previous call + function __resolveMySegmentsUpdaterCall(value?: boolean) { + if (__mySegmentsUpdaterCalls.length) __mySegmentsUpdaterCalls.shift()!.res(value); // resolve previous call else values.push(value); } + // @ts-expect-error const syncTask = syncTaskFactory( { debug() { } }, // no-op logger () => { @@ -20,7 +39,7 @@ function mySegmentsSyncTaskMock(values = []) { __mySegmentsUpdaterCalls.push({ res }); if (values.length) __resolveMySegmentsUpdaterCall(values.shift()); }); - } + }, ); return { @@ -45,18 +64,18 @@ describe('MySegmentsUpdateWorker', () => { // setup const mySegmentsSyncTask = mySegmentsSyncTaskMock(); Backoff.__TEST__BASE_MILLIS = 1; // retry immediately - const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, storageMock() as any, mySegmentsSyncTask as any, telemetryTracker); // assert calling `mySegmentsSyncTask.execute` if `isExecuting` is false expect(mySegmentsSyncTask.isExecuting()).toBe(false); - mySegmentUpdateWorker.put(100); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 100 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments if `isExecuting` is false // assert queueing changeNumber if `isExecuting` is true expect(mySegmentsSyncTask.isExecuting()).toBe(true); - mySegmentUpdateWorker.put(105); - mySegmentUpdateWorker.put(104); - mySegmentUpdateWorker.put(106); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 105 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 104 }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 106 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true // assert calling `mySegmentsSyncTask.execute` if previous call is 
resolved and a new changeNumber in queue @@ -73,36 +92,36 @@ describe('MySegmentsUpdateWorker', () => { mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); // doesn't synchronize MySegments while queue is empty - expect(mySegmentsSyncTask.execute.mock.calls).toEqual([[undefined, true], [undefined, true], [undefined, true]]); + expect(mySegmentsSyncTask.execute.mock.calls).toEqual([[undefined, true, undefined], [undefined, true, undefined], [undefined, true, undefined]]); // assert handling an event with segmentList after an event without segmentList, // to validate the special case than the fetch associated to the first event is resolved after a second event with payload arrives mySegmentsSyncTask.execute.mockClear(); expect(mySegmentsSyncTask.isExecuting()).toBe(false); - mySegmentUpdateWorker.put(110); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 110 }); expect(mySegmentsSyncTask.isExecuting()).toBe(true); - mySegmentUpdateWorker.put(120, ['some_segment']); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 120 }, { added: [], removed: ['some_segment'] }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // doesn't synchronize MySegments if `isExecuting` is true, even if payload (segmentList) is included - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['some_segment'], true); // synchronizes MySegments with given segmentList + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: 
MEMBERSHIPS_MS_UPDATE, cn: 120, added: [], removed: ['some_segment'] }, true, undefined); // synchronizes MySegments with given segmentList mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 10)); // assert handling an event without segmentList after one with segmentList mySegmentsSyncTask.execute.mockClear(); - mySegmentUpdateWorker.put(130, ['other_segment']); - mySegmentUpdateWorker.put(140); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 130 }, { added: [], removed: ['other_segment'] }); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 140 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); // synchronizes MySegments once, until event is handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(['other_segment'], true); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIPS_MS_UPDATE, cn: 130, added: [], removed: ['other_segment'] }, true, undefined); mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // re-synchronizes MySegments once previous event was handled - expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true); // synchronizes MySegments without segmentList if the event doesn't have payload + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); // synchronizes MySegments without segmentList if the event doesn't have payload mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls with backoff to `execute` @@ -113,30 +132,83 @@ describe('MySegmentsUpdateWorker', () => { // setup Backoff.__TEST__BASE_MILLIS = 50; const mySegmentsSyncTask = mySegmentsSyncTaskMock([false, false, false]); // fetch fail - const mySegmentUpdateWorker = 
MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, storageMock() as any, mySegmentsSyncTask as any, telemetryTracker); // while fetch fails, should retry with backoff - mySegmentUpdateWorker.put(100); - await new Promise(res => setTimeout(res, Backoff.__TEST__BASE_MILLIS * 3 + 100 /* some delay */)); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 100 }); + await new Promise(res => setTimeout(res, Backoff.__TEST__BASE_MILLIS! * 3 + 100 /* some delay */)); expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); // if backoff is scheduled and a new event is queued, it must be handled immediately - mySegmentUpdateWorker.put(105); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 105 }); expect(mySegmentsSyncTask.execute).toBeCalledTimes(4); }); test('stop', async () => { // setup const mySegmentsSyncTask = mySegmentsSyncTaskMock([false]); - Backoff.__TEST__BASE_MILLIS = 1; - const mySegmentUpdateWorker = MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, storageMock() as any, mySegmentsSyncTask as any, telemetryTracker); - mySegmentUpdateWorker.put(100); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 100 }); + mySegmentUpdateWorker.stop(); + + await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after stopping + expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 150 }, undefined, 10); mySegmentUpdateWorker.stop(); - await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after reseting + await new Promise(res => setTimeout(res, 20)); // Wait to assert no calls to `execute` after stopping (fetch request with delay is cleared) expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); }); + test('put, with delay and storage change number', 
async () => { + // setup + Backoff.__TEST__BASE_MILLIS = 1; // retry immediately + const mySegmentsSyncTask = mySegmentsSyncTaskMock(); + const storage = storageMock(); + const mySegmentUpdateWorker = MySegmentsUpdateWorker(loggerMock, storage as any, mySegmentsSyncTask as any, telemetryTracker); + + // notification with delay + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 100 }, undefined, 50); + + // If a notification is queued while a fetch request is waiting, the notification is discarded but its change number is used to update the target change number + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 150 }, undefined, 100); // target for segments storage is 150 + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 120 }); // target for large segments storage is 120 + + await new Promise(res => setTimeout(res, 60)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(1); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); + storage.largeSegments._changeNumber = 100; // change number update but not the expected one + mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success + + await new Promise(res => setTimeout(res, 60)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // fetch retry due to target change number mismatch + + storage.largeSegments._changeNumber = 120; + mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); + await new Promise(res => setTimeout(res, 60)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(2); // no more fetches since target change number is reached + + // If an event with segmentData (i.e., an instant update) is queued while a delayed fetch request is waiting, the instant update is discarded + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 200 }, undefined, 50); + await new Promise(res => setTimeout(res, 10)); + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 230 }, { added: ['some_segment'], removed: [] 
}); + + await new Promise(res => setTimeout(res, 60)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(3); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith(undefined, true, undefined); + mySegmentsSyncTask.__resolveMySegmentsUpdaterCall(); // fetch success + await new Promise(res => setTimeout(res)); + + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_LS_UPDATE, cn: 250 }, { added: ['some_segment'], removed: [] }); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(4); + expect(mySegmentsSyncTask.execute).toHaveBeenLastCalledWith({ type: MEMBERSHIPS_LS_UPDATE, cn: 250, added: ['some_segment'], removed: [] }, true, undefined); + + // Stop should clear the delayed fetch request + mySegmentUpdateWorker.put({ type: MEMBERSHIPS_MS_UPDATE, cn: 300 }, undefined, 10); + mySegmentUpdateWorker.stop(); + await new Promise(res => setTimeout(res, 20)); + expect(mySegmentsSyncTask.execute).toBeCalledTimes(4); + }); }); diff --git a/src/sync/streaming/UpdateWorkers/__tests__/SegmentsUpdateWorker.spec.ts b/src/sync/streaming/UpdateWorkers/__tests__/SegmentsUpdateWorker.spec.ts index 5f4068f2..31663fb5 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/SegmentsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/SegmentsUpdateWorker.spec.ts @@ -12,7 +12,7 @@ function segmentsSyncTaskMock(segmentsStorage: SegmentsCacheInMemory, changeNumb function __resolveSegmentsUpdaterCall(changeNumber: Record) { Object.keys(changeNumber).forEach(segmentName => { - segmentsStorage.setChangeNumber(segmentName, changeNumber[segmentName]); // update changeNumber in storage + segmentsStorage.update(segmentName, [], [], changeNumber[segmentName]); // update changeNumber in storage }); if (__segmentsUpdaterCalls.length) __segmentsUpdaterCalls.shift().res(); // resolve `execute` call else changeNumbers.push(changeNumber); diff --git a/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts 
b/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts index af5c6336..d5fd3acd 100644 --- a/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts +++ b/src/sync/streaming/UpdateWorkers/__tests__/SplitsUpdateWorker.spec.ts @@ -206,7 +206,7 @@ describe('SplitsUpdateWorker', () => { splitUpdateWorker.stop(); - await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after reseting + await new Promise(res => setTimeout(res, 20)); // Wait to assert no more calls to `execute` after resetting expect(splitsSyncTask.execute).toBeCalledTimes(1); }); @@ -219,9 +219,9 @@ describe('SplitsUpdateWorker', () => { const splitUpdateWorker = SplitsUpdateWorker(loggerMock, cache, splitsSyncTask, telemetryTracker); const payload = notification.decoded; const changeNumber = payload.changeNumber; - splitUpdateWorker.put( { changeNumber, pcn }, payload); // queued + splitUpdateWorker.put({ changeNumber, pcn }, payload); // queued expect(splitsSyncTask.execute).toBeCalledTimes(1); - expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, {changeNumber, payload}]); + expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, { changeNumber, payload }]); }); }); @@ -265,7 +265,7 @@ describe('SplitsUpdateWorker', () => { splitUpdateWorker = SplitsUpdateWorker(loggerMock, cache, splitsSyncTask, telemetryTracker); splitUpdateWorker.put({ changeNumber, pcn }, notification.decoded); expect(splitsSyncTask.execute).toBeCalledTimes(1); - expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, {payload: notification.decoded, changeNumber }]); + expect(splitsSyncTask.execute.mock.calls[0]).toEqual([true, undefined, { payload: notification.decoded, changeNumber }]); }); }); diff --git a/src/sync/streaming/UpdateWorkers/types.ts b/src/sync/streaming/UpdateWorkers/types.ts index cd899aa9..6e3066b7 100644 --- a/src/sync/streaming/UpdateWorkers/types.ts +++ 
b/src/sync/streaming/UpdateWorkers/types.ts @@ -1,4 +1,4 @@ -export interface IUpdateWorker { +export interface IUpdateWorker { stop(): void // clear scheduled tasks (backoff) - put(...args: any[]): void // handle new update event + put(...args: T): void // handle new update event } diff --git a/src/sync/streaming/__tests__/parseUtils.spec.ts b/src/sync/streaming/__tests__/parseUtils.spec.ts index a66487c1..a12a0a4e 100644 --- a/src/sync/streaming/__tests__/parseUtils.spec.ts +++ b/src/sync/streaming/__tests__/parseUtils.spec.ts @@ -1,8 +1,7 @@ import { hash64 } from '../../../utils/murmur3/murmur3_64'; import { keylists, bitmaps, splitNotifications } from './dataMocks'; -import { parseKeyList, parseBitmap, isInBitmap, parseFFUpdatePayload } from '../parseUtils'; -import { _Set } from '../../../utils/lang/sets'; +import { parseKeyList, parseBitmap, isInBitmap, parseFFUpdatePayload, getDelay } from '../parseUtils'; test('parseKeyList', () => { keylists.forEach(keylist => { @@ -10,8 +9,8 @@ test('parseKeyList', () => { expect(parseKeyList(keyListDataCompressed, compression)).toEqual(keyListData); // decompress KeyList - const added = new _Set(keyListData.a); - const removed = new _Set(keyListData.r); + const added = new Set(keyListData.a); + const removed = new Set(keyListData.r); addedUserKeys.forEach(userKey => { const hash = hash64(userKey); @@ -60,3 +59,16 @@ test('split notification - parseKeyList', () => { }); }); + +test('getDelay', () => { + // if h === 0, return 0 (immediate, no delay) + expect(getDelay({ i: 300, h: 0, s: 1 }, 'anything')).toBe(0); + + // if h !== 0, calculate delay with provided hash, seed and interval + expect(getDelay({ i: 300, h: 1, s: 0 }, 'nicolas@split.io')).toBe(241); + expect(getDelay({ i: 60000, h: 1, s: 1 }, 'emi@split.io')).toBe(14389); + expect(getDelay({ i: 60000, h: 1, s: 0 }, 'emi@split.io')).toBe(24593); + + // if i, h and s are not provided, use defaults + expect(getDelay({}, 'emi@split.io')).toBe(24593); +}); diff --git 
a/src/sync/streaming/__tests__/pushManager.spec.ts b/src/sync/streaming/__tests__/pushManager.spec.ts index fb5fc39d..7a08bc9b 100644 --- a/src/sync/streaming/__tests__/pushManager.spec.ts +++ b/src/sync/streaming/__tests__/pushManager.spec.ts @@ -33,7 +33,7 @@ test('pushManagerFactory returns undefined if EventSource is not available', () describe('pushManager in client-side', () => { - test('does not connect to streaming if it is stopped inmediatelly after being started', async () => { + test('does not connect to streaming if it is stopped immediately after being started', async () => { const fetchAuthMock = jest.fn(); const pushManager = pushManagerFactory({ // @ts-ignore @@ -122,7 +122,7 @@ describe('pushManager in client-side', () => { describe('pushManager in server-side', () => { - test('does not connect to streaming if it is stopped inmediatelly after being started', async () => { + test('does not connect to streaming if it is stopped immediately after being started', async () => { const fetchAuthMock = jest.fn(); const pushManager = pushManagerFactory({ // @ts-ignore diff --git a/src/sync/streaming/constants.ts b/src/sync/streaming/constants.ts index 8afb41a6..ed958ee7 100644 --- a/src/sync/streaming/constants.ts +++ b/src/sync/streaming/constants.ts @@ -25,8 +25,8 @@ export const PUSH_SUBSYSTEM_UP = 'PUSH_SUBSYSTEM_UP'; export const PUSH_SUBSYSTEM_DOWN = 'PUSH_SUBSYSTEM_DOWN'; // Update-type push notifications, handled by NotificationProcessor -export const MY_SEGMENTS_UPDATE = 'MY_SEGMENTS_UPDATE'; -export const MY_SEGMENTS_UPDATE_V2 = 'MY_SEGMENTS_UPDATE_V2'; +export const MEMBERSHIPS_MS_UPDATE = 'MEMBERSHIPS_MS_UPDATE'; +export const MEMBERSHIPS_LS_UPDATE = 'MEMBERSHIPS_LS_UPDATE'; export const SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export const SPLIT_KILL = 'SPLIT_KILL'; export const SPLIT_UPDATE = 'SPLIT_UPDATE'; diff --git a/src/sync/streaming/parseUtils.ts b/src/sync/streaming/parseUtils.ts index 83fca519..97fde935 100644 --- 
a/src/sync/streaming/parseUtils.ts +++ b/src/sync/streaming/parseUtils.ts @@ -1,6 +1,8 @@ import { algorithms } from '../../utils/decompress'; import { decodeFromBase64 } from '../../utils/base64'; -import { Compression, KeyList } from './SSEHandler/types'; +import { hash } from '../../utils/murmur3/murmur3'; +import { Compression, IMembershipMSUpdateData, KeyList } from './SSEHandler/types'; +import { ISplit } from '../../dtos/types'; const GZIP = 1; const ZLIB = 2; @@ -11,16 +13,15 @@ function Uint8ArrayToString(myUint8Arr: Uint8Array) { // @ts-ignore function StringToUint8Array(myString: string) { const charCodes = myString.split('').map((e) => e.charCodeAt(0)); - // eslint-disable-next-line compat/compat return new Uint8Array(charCodes); } /** * Decode and decompress 'data' with 'compression' algorithm * - * @param {string} data - * @param {number} compression 1 GZIP, 2 ZLIB - * @returns {Uint8Array} + * @param data - base64 encoded string + * @param compression - 1 GZIP, 2 ZLIB + * @returns + * @throws if data string cannot be decoded, decompressed or the provided compression value is invalid (not 1 or 2) */ function decompress(data: string, compression: Compression) { @@ -36,13 +37,13 @@ function decompress(data: string, compression: Compression) { /** * Decode, decompress and parse the provided 'data' into a KeyList object * - * @param {string} data - * @param {number} compression - * @param {boolean} avoidPrecisionLoss true as default, set it as false if dont need to avoid precission loss - * @returns {{a?: string[], r?: string[] }} + * @param data - base64 encoded string + * @param compression - 1 GZIP, 2 ZLIB + * @param avoidPrecisionLoss - true as default, set it as false if you don't need to avoid precision loss + * @returns keyList * @throws if data string cannot be decoded, decompressed or parsed */ -export function parseKeyList(data: string, compression: Compression, avoidPrecisionLoss: boolean = true): KeyList { +export function parseKeyList(data: string, 
compression: Compression, avoidPrecisionLoss = true): KeyList { const binKeyList = decompress(data, compression); let strKeyList = Uint8ArrayToString(binKeyList); // replace numbers to strings, to avoid losing precision @@ -53,9 +54,9 @@ export function parseKeyList(data: string, compression: Compression, avoidPrecis /** * Decode, decompress and parse the provided 'data' into a Bitmap object * - * @param {string} data - * @param {number} compression - * @returns {Uint8Array} + * @param data - base64 encoded string + * @param compression - 1 GZIP, 2 ZLIB + * @returns Bitmap * @throws if data string cannot be decoded or decompressed */ export function parseBitmap(data: string, compression: Compression) { @@ -65,9 +66,9 @@ export function parseBitmap(data: string, compression: Compression) { /** * Check if the 'bitmap' bit at 'hash64hex' position is 1 * - * @param {Uint8Array} bitmap - * @param {string} hash64hex 16-chars string, representing a number in hexa - * @returns {boolean} + * @param bitmap - Uint8Array bitmap + * @param hash64hex - 16-chars string, representing a number in hexa + * @returns whether the provided 'hash64hex' index is set in the bitmap */ export function isInBitmap(bitmap: Uint8Array, hash64hex: string) { // using the lowest 32 bits as index, to avoid losing precision when converting to number @@ -80,14 +81,20 @@ export function isInBitmap(bitmap: Uint8Array, hash64hex: string) { /** * Parse feature flags notifications for instant feature flag updates - * - * @param {ISplitUpdateData} data - * @returns {KeyList} */ -export function parseFFUpdatePayload(compression: Compression, data: string): KeyList | undefined { - const avoidPrecisionLoss = false; - if (compression > 0) - return parseKeyList(data, compression, avoidPrecisionLoss); - else - return JSON.parse(decodeFromBase64(data)); +export function parseFFUpdatePayload(compression: Compression, data: string): ISplit | undefined { + return compression > 0 ? 
+ parseKeyList(data, compression, false) : + JSON.parse(decodeFromBase64(data)); +} + +const DEFAULT_MAX_INTERVAL = 60000; + +export function getDelay(parsedData: Pick, matchingKey: string) { + if (parsedData.h === 0) return 0; + + const interval = parsedData.i || DEFAULT_MAX_INTERVAL; + const seed = parsedData.s || 0; + + return hash(matchingKey, seed) % interval; } diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index 33819304..d089a167 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -11,16 +11,14 @@ import { authenticateFactory, hashUserKey } from './AuthClient'; import { forOwn } from '../../utils/lang'; import { SSEClient } from './SSEClient'; import { getMatching } from '../../utils/key'; -import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType } from './constants'; -import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants'; -import { KeyList, UpdateStrategy } from './SSEHandler/types'; -import { isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; -import { ISet, _Set } from '../../utils/lang/sets'; +import { MEMBERSHIPS_MS_UPDATE, MEMBERSHIPS_LS_UPDATE, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType } from './constants'; +import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MEMBERSHIPS_UPDATE, STREAMING_PARSING_SPLIT_UPDATE } from 
'../../logger/constants'; +import { IMembershipMSUpdateData, IMembershipLSUpdateData, KeyList, UpdateStrategy } from './SSEHandler/types'; +import { getDelay, isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; import { TOKEN_REFRESH, AUTH_REJECTION } from '../../utils/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; -import { IUpdateWorker } from './UpdateWorkers/types'; /** * PushManager factory: @@ -60,11 +58,11 @@ export function pushManagerFactory( // For server-side we pass the segmentsSyncTask, used by SplitsUpdateWorker to fetch new segments const splitsUpdateWorker = SplitsUpdateWorker(log, storage.splits, pollingManager.splitsSyncTask, readiness.splits, telemetryTracker, userKey ? undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask); - // [Only for client-side] map of hashes to user keys, to dispatch MY_SEGMENTS_UPDATE events to the corresponding MySegmentsUpdateWorker + // [Only for client-side] map of hashes to user keys, to dispatch membership update events to the corresponding MySegmentsUpdateWorker const userKeyHashes: Record = {}; // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers. - // Hash64 is used to process MY_SEGMENTS_UPDATE_V2 events and dispatch actions to the corresponding MySegmentsUpdateWorker. - const clients: Record = {}; + // Hash64 is used to process membership update events and dispatch actions to the corresponding MySegmentsUpdateWorker. + const clients: Record }> = {}; // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming. 
let connectForNewClient = false; @@ -236,76 +234,75 @@ export function pushManagerFactory( splitsUpdateWorker.put(parsedData); }); - if (userKey) { - pushEmitter.on(MY_SEGMENTS_UPDATE, function handleMySegmentsUpdate(parsedData, channel) { - const userKeyHash = channel.split('_')[2]; - const userKey = userKeyHashes[userKeyHash]; - if (userKey && clients[userKey]) { // check existence since it can be undefined if client has been destroyed - clients[userKey].worker.put( - parsedData.changeNumber, - parsedData.includesPayload ? parsedData.segmentList ? parsedData.segmentList : [] : undefined); - } - }); - pushEmitter.on(MY_SEGMENTS_UPDATE_V2, function handleMySegmentsUpdate(parsedData) { - switch (parsedData.u) { - case UpdateStrategy.BoundedFetchRequest: { - let bitmap: Uint8Array; - try { - bitmap = parseBitmap(parsedData.d, parsedData.c); - } catch (e) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['BoundedFetchRequest', e]); - break; - } - - forOwn(clients, ({ hash64, worker }) => { - if (isInBitmap(bitmap, hash64.hex)) { - worker.put(parsedData.changeNumber); // fetch mySegments - } - }); - return; + function handleMySegmentsUpdate(parsedData: IMembershipMSUpdateData | IMembershipLSUpdateData) { + switch (parsedData.u) { + case UpdateStrategy.BoundedFetchRequest: { + let bitmap: Uint8Array; + try { + bitmap = parseBitmap(parsedData.d!, parsedData.c!); + } catch (e) { + log.warn(STREAMING_PARSING_MEMBERSHIPS_UPDATE, ['BoundedFetchRequest', e]); + break; } - case UpdateStrategy.KeyList: { - let keyList: KeyList, added: ISet, removed: ISet; - try { - keyList = parseKeyList(parsedData.d, parsedData.c); - added = new _Set(keyList.a); - removed = new _Set(keyList.r); - } catch (e) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['KeyList', e]); - break; + + forOwn(clients, ({ hash64, worker }, matchingKey) => { + if (isInBitmap(bitmap, hash64.hex)) { + worker.put(parsedData, undefined, getDelay(parsedData, matchingKey)); } + }); + return; + } + case 
UpdateStrategy.KeyList: { + let keyList: KeyList, added: Set, removed: Set; + try { + keyList = parseKeyList(parsedData.d!, parsedData.c!); + added = new Set(keyList.a); + removed = new Set(keyList.r); + } catch (e) { + log.warn(STREAMING_PARSING_MEMBERSHIPS_UPDATE, ['KeyList', e]); + break; + } - forOwn(clients, ({ hash64, worker }) => { - const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; - if (add !== undefined) { - worker.put(parsedData.changeNumber, { - name: parsedData.segmentName, - add - }); - } - }); - return; + if (!parsedData.n || !parsedData.n.length) { + log.warn(STREAMING_PARSING_MEMBERSHIPS_UPDATE, ['KeyList', 'No segment name was provided']); + break; } - case UpdateStrategy.SegmentRemoval: - if (!parsedData.segmentName) { - log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['SegmentRemoval', 'No segment name was provided']); - break; - } - forOwn(clients, ({ worker }) => - worker.put(parsedData.changeNumber, { - name: parsedData.segmentName, - add: false - }) - ); - return; + forOwn(clients, ({ hash64, worker }) => { + const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined; + if (add !== undefined) { + worker.put(parsedData, { + added: add ? [parsedData.n![0]] : [], + removed: add ? [] : [parsedData.n![0]] + }); + } + }); + return; } + case UpdateStrategy.SegmentRemoval: + if (!parsedData.n || !parsedData.n.length) { + log.warn(STREAMING_PARSING_MEMBERSHIPS_UPDATE, ['SegmentRemoval', 'No segment name was provided']); + break; + } - // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases - forOwn(clients, ({ worker }) => { - worker.put(parsedData.changeNumber); - }); + forOwn(clients, ({ worker }) => { + worker.put(parsedData, { + added: [], + removed: parsedData.n! 
+ }); + }); + return; + } + + // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases + forOwn(clients, ({ worker }, matchingKey) => { + worker.put(parsedData, undefined, getDelay(parsedData, matchingKey)); }); + } + + if (userKey) { + pushEmitter.on(MEMBERSHIPS_MS_UPDATE, handleMySegmentsUpdate); + pushEmitter.on(MEMBERSHIPS_LS_UPDATE, handleMySegmentsUpdate); } else { pushEmitter.on(SEGMENT_UPDATE, segmentsUpdateWorker!.put); } @@ -328,7 +325,7 @@ export function pushManagerFactory( if (disabled || disconnected === false) return; disconnected = false; - if (userKey) this.add(userKey, pollingManager.segmentsSyncTask as IMySegmentsSyncTask); // client-side + if (userKey) this.add(userKey, pollingManager.segmentsSyncTask); // client-side else setTimeout(connectPush); // server-side runs in next cycle as in client-side, for consistency with client-side }, @@ -343,7 +340,10 @@ export function pushManagerFactory( if (!userKeyHashes[hash]) { userKeyHashes[hash] = userKey; - clients[userKey] = { hash64: hash64(userKey), worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker) }; + clients[userKey] = { + hash64: hash64(userKey), + worker: MySegmentsUpdateWorker(log, storage, mySegmentsSyncTask, telemetryTracker) + }; connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key // Reconnects in case of a new client. 
diff --git a/src/sync/streaming/types.ts b/src/sync/streaming/types.ts index 715220f1..ec80781e 100644 --- a/src/sync/streaming/types.ts +++ b/src/sync/streaming/types.ts @@ -1,7 +1,7 @@ -import { IMySegmentsUpdateData, IMySegmentsUpdateV2Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData } from './SSEHandler/types'; +import { IMembershipMSUpdateData, IMembershipLSUpdateData, ISegmentUpdateData, ISplitUpdateData, ISplitKillData, INotificationData } from './SSEHandler/types'; import { ITask } from '../types'; import { IMySegmentsSyncTask } from '../polling/types'; -import { IEventEmitter } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ControlType } from './constants'; // Internal SDK events, subscribed by SyncManager and PushManager @@ -11,8 +11,8 @@ export type PUSH_NONRETRYABLE_ERROR = 'PUSH_NONRETRYABLE_ERROR' export type PUSH_RETRYABLE_ERROR = 'PUSH_RETRYABLE_ERROR' // Update-type push notifications, handled by NotificationProcessor -export type MY_SEGMENTS_UPDATE = 'MY_SEGMENTS_UPDATE'; -export type MY_SEGMENTS_UPDATE_V2 = 'MY_SEGMENTS_UPDATE_V2'; +export type MEMBERSHIPS_MS_UPDATE = 'MEMBERSHIPS_MS_UPDATE'; +export type MEMBERSHIPS_LS_UPDATE = 'MEMBERSHIPS_LS_UPDATE'; export type SEGMENT_UPDATE = 'SEGMENT_UPDATE'; export type SPLIT_KILL = 'SPLIT_KILL'; export type SPLIT_UPDATE = 'SPLIT_UPDATE'; @@ -21,23 +21,23 @@ export type SPLIT_UPDATE = 'SPLIT_UPDATE'; export type CONTROL = 'CONTROL'; export type OCCUPANCY = 'OCCUPANCY'; -export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MY_SEGMENTS_UPDATE | MY_SEGMENTS_UPDATE_V2 | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | ControlType.STREAMING_RESET +export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MEMBERSHIPS_MS_UPDATE | MEMBERSHIPS_LS_UPDATE | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | ControlType.STREAMING_RESET type IParsedData = - T extends 
MY_SEGMENTS_UPDATE ? IMySegmentsUpdateData : - T extends MY_SEGMENTS_UPDATE_V2 ? IMySegmentsUpdateV2Data : + T extends MEMBERSHIPS_MS_UPDATE ? IMembershipMSUpdateData : + T extends MEMBERSHIPS_LS_UPDATE ? IMembershipLSUpdateData : T extends SEGMENT_UPDATE ? ISegmentUpdateData : T extends SPLIT_UPDATE ? ISplitUpdateData : - T extends SPLIT_KILL ? ISplitKillData : undefined; + T extends SPLIT_KILL ? ISplitKillData : INotificationData; /** * EventEmitter used as Feedback Loop between the SyncManager and PushManager, * where the latter pushes messages and the former consumes it */ -export interface IPushEventEmitter extends IEventEmitter { - once(event: T, listener: (parsedData: IParsedData, channel: T extends MY_SEGMENTS_UPDATE ? string : undefined) => void): this; - on(event: T, listener: (parsedData: IParsedData, channel: T extends MY_SEGMENTS_UPDATE ? string : undefined) => void): this; - emit(event: T, parsedData?: IParsedData, channel?: T extends MY_SEGMENTS_UPDATE ? string : undefined): boolean; +export interface IPushEventEmitter extends SplitIO.IEventEmitter { + once(event: T, listener: (parsedData: IParsedData) => void): this; + on(event: T, listener: (parsedData: IParsedData) => void): this; + emit(event: T, parsedData?: IParsedData): boolean; } /** diff --git a/src/sync/submitters/__tests__/eventsSubmitter.spec.ts b/src/sync/submitters/__tests__/eventsSubmitter.spec.ts index d79d0c5e..3da0d1c8 100644 --- a/src/sync/submitters/__tests__/eventsSubmitter.spec.ts +++ b/src/sync/submitters/__tests__/eventsSubmitter.spec.ts @@ -31,7 +31,7 @@ describe('Events submitter', () => { eventsSubmitter.start(); expect(eventsSubmitter.isRunning()).toEqual(true); // Submitter should be flagged as running - expect(eventsSubmitter.isExecuting()).toEqual(false); // but not executed immediatelly if there is a push window + expect(eventsSubmitter.isExecuting()).toEqual(false); // but not executed immediately if there is a push window 
expect(eventsCacheMock.isEmpty).not.toBeCalled(); // If queue is full, submitter should be executed @@ -55,7 +55,7 @@ describe('Events submitter', () => { eventsSubmitter.start(); expect(eventsSubmitter.isRunning()).toEqual(true); // Submitter should be flagged as running - expect(eventsSubmitter.isExecuting()).toEqual(true); // and executes immediatelly if there isn't a push window + expect(eventsSubmitter.isExecuting()).toEqual(true); // and executes immediately if there isn't a push window expect(eventsCacheMock.isEmpty).toBeCalledTimes(1); // If queue is full, submitter is executed again after current execution is resolved diff --git a/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts b/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts index 000dc0a7..57a368c5 100644 --- a/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts +++ b/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts @@ -39,7 +39,7 @@ describe('Telemetry submitter', () => { telemetrySubmitter.start(); expect(telemetrySubmitter.isRunning()).toEqual(true); // Submitter should be flagged as running - expect(telemetrySubmitter.isExecuting()).toEqual(false); // but not executed immediatelly (first push window) + expect(telemetrySubmitter.isExecuting()).toEqual(false); // but not executed immediately (first push window) expect(popSpy).toBeCalledTimes(0); // Await first periodic execution @@ -48,7 +48,7 @@ describe('Telemetry submitter', () => { expect(isEmptySpy).toBeCalledTimes(1); expect(popSpy).toBeCalledTimes(1); expect(postMetricsUsage).toBeCalledWith(JSON.stringify({ - lS: {}, mL: {}, mE: {}, hE: {}, hL: {}, tR: 0, aR: 0, iQ: 0, iDe: 0, iDr: 0, spC: 0, seC: 0, skC: 0, eQ: 0, eD: 0, sE: [], t: ['tag1'], ufs:{ sp: 0, ms: 0 } + lS: {}, mL: {}, mE: {}, hE: {}, hL: {}, tR: 0, aR: 0, iQ: 0, iDe: 0, iDr: 0, spC: 0, seC: 0, skC: 0, eQ: 0, eD: 0, sE: [], t: ['tag1'], ufs: {} })); // Await second periodic execution diff --git a/src/sync/submitters/impressionsSubmitter.ts 
b/src/sync/submitters/impressionsSubmitter.ts index e54b5f1f..193e2703 100644 --- a/src/sync/submitters/impressionsSubmitter.ts +++ b/src/sync/submitters/impressionsSubmitter.ts @@ -1,5 +1,5 @@ import { groupBy, forOwn } from '../../utils/lang'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { submitterFactory } from './submitter'; import { ImpressionsPayload } from './types'; import { SUBMITTERS_PUSH_FULL_QUEUE } from '../../logger/constants'; @@ -10,7 +10,7 @@ const DATA_NAME = 'impressions'; /** * Converts `impressions` data from cache into request payload. */ -export function fromImpressionsCollector(sendLabels: boolean, data: ImpressionDTO[]): ImpressionsPayload { +export function fromImpressionsCollector(sendLabels: boolean, data: SplitIO.ImpressionDTO[]): ImpressionsPayload { let groupedByFeature = groupBy(data, 'feature'); let dto: ImpressionsPayload = []; diff --git a/src/sync/submitters/telemetrySubmitter.ts b/src/sync/submitters/telemetrySubmitter.ts index a2289e08..7a2e2ee7 100644 --- a/src/sync/submitters/telemetrySubmitter.ts +++ b/src/sync/submitters/telemetrySubmitter.ts @@ -3,7 +3,8 @@ import { submitterFactory, firstPushWindowDecorator } from './submitter'; import { TelemetryConfigStatsPayload, TelemetryConfigStats } from './types'; import { CONSUMER_MODE, CONSUMER_ENUM, STANDALONE_MODE, CONSUMER_PARTIAL_MODE, STANDALONE_ENUM, CONSUMER_PARTIAL_ENUM, OPTIMIZED, DEBUG, NONE, DEBUG_ENUM, OPTIMIZED_ENUM, NONE_ENUM, CONSENT_GRANTED, CONSENT_DECLINED, CONSENT_UNKNOWN } from '../../utils/constants'; import { SDK_READY, SDK_READY_FROM_CACHE } from '../../readiness/constants'; -import { ConsentStatus, ISettings, SDKMode, SplitIO } from '../../types'; +import { ISettings } from '../../types'; +import SplitIO from '../../../types/splitio'; import { base } from '../../utils/settingsValidation'; import { usedKeysMap } from '../../utils/inputValidation/apiKey'; import { timer } from 
'../../utils/timeTracker/timer'; @@ -27,7 +28,7 @@ const USER_CONSENT_MAP = { [CONSENT_UNKNOWN]: 1, [CONSENT_GRANTED]: 2, [CONSENT_DECLINED]: 3 -} as Record; +} as Record; function getActiveFactories() { return Object.keys(usedKeysMap).length; @@ -51,7 +52,7 @@ function getTelemetryFlagSetsStats(splitFiltersValidation: ISplitFiltersValidati return { flagSetsTotal, flagSetsIgnored }; } -export function getTelemetryConfigStats(mode: SDKMode, storageType: string): TelemetryConfigStats { +export function getTelemetryConfigStats(mode: SplitIO.SDKMode, storageType: string): TelemetryConfigStats { return { oM: OPERATION_MODE_MAP[mode], // @ts-ignore lower case of storage type st: storageType.toLowerCase(), diff --git a/src/sync/submitters/types.ts b/src/sync/submitters/types.ts index 440f466a..f3b93c4d 100644 --- a/src/sync/submitters/types.ts +++ b/src/sync/submitters/types.ts @@ -1,7 +1,6 @@ /* eslint-disable no-use-before-define */ import { IMetadata } from '../../dtos/types'; -import { SplitIO } from '../../types'; -import { IMap } from '../../utils/lang/maps'; +import SplitIO from '../../../types/splitio'; import { ISyncTask } from '../types'; export type ImpressionsPayload = { @@ -88,11 +87,11 @@ export type StoredEventWithMetadata = { e: SplitIO.EventData } -export type MultiMethodLatencies = IMap +export type MultiMethodLatencies = Map -export type MultiMethodExceptions = IMap +export type MultiMethodExceptions = Map -export type MultiConfigs = IMap +export type MultiConfigs = Map /** * Telemetry usage stats @@ -103,7 +102,7 @@ export type DROPPED = 1; export type DEDUPED = 2; export type ImpressionDataType = QUEUED | DROPPED | DEDUPED export type EventDataType = QUEUED | DROPPED; -export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT; +export type UpdatesFromSSEEnum = SPLITS | MEMBERSHIPS; export type SPLITS = 'sp'; export type IMPRESSIONS = 'im'; @@ -112,8 +111,8 @@ export type EVENTS = 'ev'; export type TELEMETRY = 'te'; export type TOKEN = 'to'; export type 
SEGMENT = 'se'; -export type MY_SEGMENT = 'ms'; -export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MY_SEGMENT; +export type MEMBERSHIPS = 'ms'; +export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MEMBERSHIPS; export type LastSync = Partial> export type HttpErrors = Partial> @@ -158,8 +157,9 @@ export type TelemetryUsageStats = { // amount of instant updates that we are doing by avoiding fetching to Split servers export type UpdatesFromSSE = { - sp: number, // splits + sp?: number, // splits ms?: number, // my segments + mls?: number // my large segments } // 'metrics/usage' JSON request body @@ -175,12 +175,14 @@ export type TelemetryUsageStatsPayload = TelemetryUsageStats & { spC?: number, // splitCount seC?: number, // segmentCount skC?: number, // segmentKeyCount + lsC?: number, // largeSegmentCount + lskC?: number, // largeSegmentKeyCount sL?: number, // sessionLengthMs eQ: number, // eventsQueued eD: number, // eventsDropped sE: Array, // streamingEvents t?: Array, // tags - ufs?: UpdatesFromSSE, //UpdatesFromSSE + ufs?: UpdatesFromSSE, // instant updates } /** diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index b6407630..071e9ea3 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -14,9 +14,9 @@ import { ISdkFactoryContextSync } from '../sdkFactory/types'; * Online SyncManager factory. * Can be used for server-side API, and client-side API with or without multiple clients. * - * @param pollingManagerFactory allows to specialize the SyncManager for server-side or client-side API by passing + * @param pollingManagerFactory - allows to specialize the SyncManager for server-side or client-side API by passing * `pollingManagerSSFactory` or `pollingManagerCSFactory` respectively. 
- * @param pushManagerFactory optional to build a SyncManager with or without streaming support + * @param pushManagerFactory - optional to build a SyncManager with or without streaming support */ export function syncManagerOnlineFactory( pollingManagerFactory?: (params: ISdkFactoryContextSync) => IPollingManager, @@ -143,27 +143,27 @@ export function syncManagerOnlineFactory( const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).add(matchingKey, readinessManager, storage); - return { - isRunning: mySegmentsSyncTask.isRunning, - start() { - if (syncEnabled) { - if (pushManager) { - if (pollingManager!.isRunning()) { - // if doing polling, we must start the periodic fetch of data - if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); - } else { - // if not polling, we must execute the sync task for the initial fetch - // of segments since `syncAll` was already executed when starting the main client - mySegmentsSyncTask.execute(); - } - pushManager.add(matchingKey, mySegmentsSyncTask); - } else { + if (running) { + if (syncEnabled) { + if (pushManager) { + if (pollingManager!.isRunning()) { + // if doing polling, we must start the periodic fetch of data if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); + } else { + // if not polling, we must execute the sync task for the initial fetch + // of segments since `syncAll` was already executed when starting the main client + mySegmentsSyncTask.execute(); } + pushManager.add(matchingKey, mySegmentsSyncTask); } else { - if (!readinessManager.isReady()) mySegmentsSyncTask.execute(); + if (storage.splits.usesSegments()) mySegmentsSyncTask.start(); } - }, + } else { + if (!readinessManager.isReady()) mySegmentsSyncTask.execute(); + } + } + + return { stop() { // check in case `client.destroy()` has been invoked more than once for the same client const mySegmentsSyncTask = (pollingManager as IPollingManagerCS).get(matchingKey); diff --git a/src/sync/syncTask.ts b/src/sync/syncTask.ts index 
035a9c59..5f22f6c0 100644 --- a/src/sync/syncTask.ts +++ b/src/sync/syncTask.ts @@ -6,10 +6,10 @@ import { ISyncTask } from './types'; * Creates an object that handles the periodic execution of a given task via "start" and "stop" methods. * The task can be also executed by calling the "execute" method. Multiple calls run sequentially to avoid race conditions (e.g., submitters executed on SDK destroy or full queue, while periodic execution is pending). * - * @param log Logger instance. - * @param task Task to execute that returns a promise that NEVER REJECTS. Otherwise, periodic execution can result in Unhandled Promise Rejections. - * @param period Period in milliseconds to execute the task. - * @param taskName Optional task name for logging. + * @param log - Logger instance. + * @param task - Task to execute that returns a promise that NEVER REJECTS. Otherwise, periodic execution can result in Unhandled Promise Rejections. + * @param period - Period in milliseconds to execute the task. + * @param taskName - Optional task name for logging. * @returns A sync task that wraps the given task. 
*/ export function syncTaskFactory(log: ILogger, task: (...args: Input) => Promise, period: number, taskName = 'task'): ISyncTask { diff --git a/src/sync/types.ts b/src/sync/types.ts index 81727ca9..956d3112 100644 --- a/src/sync/types.ts +++ b/src/sync/types.ts @@ -44,5 +44,5 @@ export interface ISyncManager extends ITask { } export interface ISyncManagerCS extends ISyncManager { - shared(matchingKey: string, readinessManager: IReadinessManager, storage: IStorageSync): ISyncManager | undefined + shared(matchingKey: string, readinessManager: IReadinessManager, storage: IStorageSync): Pick | undefined } diff --git a/src/trackers/__tests__/eventTracker.spec.ts b/src/trackers/__tests__/eventTracker.spec.ts index 18dba632..95ce3e33 100644 --- a/src/trackers/__tests__/eventTracker.spec.ts +++ b/src/trackers/__tests__/eventTracker.spec.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { QUEUED } from '../../utils/constants'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; import { eventTrackerFactory } from '../eventTracker'; @@ -29,13 +29,15 @@ const fakeEvent = { } }; +const fakeWhenInit = (cb: () => void) => cb(); + /* Tests */ describe('Event Tracker', () => { test('Tracker API', () => { expect(typeof eventTrackerFactory).toBe('function'); // The module should return a function which acts as a factory. - const instance = eventTrackerFactory(fullSettings, fakeEventsCache, fakeIntegrationsManager); + const instance = eventTrackerFactory(fullSettings, fakeEventsCache, fakeWhenInit, fakeIntegrationsManager); expect(typeof instance.track).toBe('function'); // The instance should implement the track method. 
}); @@ -51,7 +53,7 @@ describe('Event Tracker', () => { } }); // @ts-ignore - const tracker = eventTrackerFactory(fullSettings, fakeEventsCache, fakeIntegrationsManager, fakeTelemetryCache); + const tracker = eventTrackerFactory(fullSettings, fakeEventsCache, fakeWhenInit, fakeIntegrationsManager, fakeTelemetryCache); const result1 = tracker.track(fakeEvent, 1); expect(fakeEventsCache.track.mock.calls[0]).toEqual([fakeEvent, 1]); // Should be present in the event cache. @@ -92,7 +94,7 @@ describe('Event Tracker', () => { const settings = { ...fullSettings }; const fakeEventsCache = { track: jest.fn(() => true) }; - const tracker = eventTrackerFactory(settings, fakeEventsCache); + const tracker = eventTrackerFactory(settings, fakeEventsCache, fakeWhenInit); expect(tracker.track(fakeEvent)).toBe(true); expect(fakeEventsCache.track).toBeCalledTimes(1); // event should be tracked if userConsent is undefined diff --git a/src/trackers/__tests__/impressionsTracker.spec.ts b/src/trackers/__tests__/impressionsTracker.spec.ts index d5c40ee7..08ec9f71 100644 --- a/src/trackers/__tests__/impressionsTracker.spec.ts +++ b/src/trackers/__tests__/impressionsTracker.spec.ts @@ -2,7 +2,7 @@ import { impressionsTrackerFactory } from '../impressionsTracker'; import { ImpressionCountsCacheInMemory } from '../../storages/inMemory/ImpressionCountsCacheInMemory'; import { impressionObserverSSFactory } from '../impressionObserver/impressionObserverSS'; import { impressionObserverCSFactory } from '../impressionObserver/impressionObserverCS'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; import { strategyDebugFactory } from '../strategy/strategyDebug'; import { strategyOptimizedFactory } from '../strategy/strategyOptimized'; @@ -34,6 +34,7 @@ const fakeSettingsWithListener = { ...fakeSettings, impressionListener: fakeListener }; +const fakeWhenInit = (cb: () 
=> void) => cb(); /* Tests */ @@ -50,22 +51,22 @@ describe('Impressions Tracker', () => { test('Tracker API', () => { expect(typeof impressionsTrackerFactory).toBe('function'); // The module should return a function which acts as a factory. - const instance = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategy); + const instance = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategy, fakeWhenInit); expect(typeof instance.track).toBe('function'); // The instance should implement the track method which will actually track queued impressions. }); test('Should be able to track impressions (in DEBUG mode without Previous Time).', () => { - const tracker = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategy); + const tracker = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategy, fakeWhenInit); const imp1 = { feature: '10', - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const imp2 = { feature: '20', - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const imp3 = { feature: '30', - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; expect(fakeImpressionsCache.track).not.toBeCalled(); // cache method should not be called by just creating a tracker @@ -75,14 +76,14 @@ describe('Impressions Tracker', () => { }); test('Tracked impressions should be sent to impression listener and integration manager when we invoke .track()', (done) => { - const tracker = impressionsTrackerFactory(fakeSettingsWithListener, fakeImpressionsCache, strategy, fakeIntegrationsManager); + const tracker = impressionsTrackerFactory(fakeSettingsWithListener, fakeImpressionsCache, strategy, fakeWhenInit, fakeIntegrationsManager); const fakeImpression = { feature: 'impression' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const fakeImpression2 = { feature: 'impression_2' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const fakeAttributes = { fake: 'attributes' }; @@ -126,7 +127,7 @@ describe('Impressions Tracker', () 
=> { time: 0, bucketingKey: 'impr_bucketing_2', label: 'default rule' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const impression2 = { feature: 'qc_team_2', keyName: 'marcio@split.io', @@ -134,7 +135,7 @@ describe('Impressions Tracker', () => { time: 0, bucketingKey: 'impr_bucketing_2', label: 'default rule' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; const impression3 = { feature: 'qc_team', keyName: 'marcio@split.io', @@ -142,15 +143,15 @@ describe('Impressions Tracker', () => { time: 0, bucketingKey: 'impr_bucketing_2', label: 'default rule' - } as ImpressionDTO; + } as SplitIO.ImpressionDTO; test('Should track 3 impressions with Previous Time.', () => { impression.time = impression2.time = 123456789; impression3.time = 1234567891; const trackers = [ - impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyDebugFactory(impressionObserverSSFactory()), undefined), - impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyDebugFactory(impressionObserverCSFactory()), undefined) + impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyDebugFactory(impressionObserverSSFactory()), fakeWhenInit, undefined), + impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyDebugFactory(impressionObserverCSFactory()), fakeWhenInit, undefined) ]; expect(fakeImpressionsCache.track).not.toBeCalled(); // storage method should not be called until impressions are tracked. 
@@ -176,8 +177,8 @@ describe('Impressions Tracker', () => { impression2.time = Date.now(); impression3.time = Date.now(); - const impressionCountsCache = new ImpressionCountsCacheInMemory(); // @ts-ignore - const tracker = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyOptimizedFactory(impressionObserverCSFactory(), impressionCountsCache), undefined, fakeTelemetryCache); + const impressionCountsCache = new ImpressionCountsCacheInMemory(); + const tracker = impressionsTrackerFactory(fakeSettings, fakeImpressionsCache, strategyOptimizedFactory(impressionObserverCSFactory(), impressionCountsCache), fakeWhenInit, undefined, fakeTelemetryCache as any); expect(fakeImpressionsCache.track).not.toBeCalled(); // cache method should not be called by just creating a tracker @@ -200,7 +201,7 @@ describe('Impressions Tracker', () => { test('Should track or not impressions depending on user consent status', () => { const settings = { ...fullSettings }; - const tracker = impressionsTrackerFactory(settings, fakeImpressionsCache, strategy); + const tracker = impressionsTrackerFactory(settings, fakeImpressionsCache, strategy, fakeWhenInit); tracker.track([impression]); expect(fakeImpressionsCache.track).toBeCalledTimes(1); // impression should be tracked if userConsent is undefined diff --git a/src/trackers/__tests__/uniqueKeysTracker.spec.ts b/src/trackers/__tests__/uniqueKeysTracker.spec.ts index e9dab3c5..1e577d4e 100644 --- a/src/trackers/__tests__/uniqueKeysTracker.spec.ts +++ b/src/trackers/__tests__/uniqueKeysTracker.spec.ts @@ -48,6 +48,8 @@ describe('Unique keys tracker', () => { const uniqueKeysTrackerWithRefresh = uniqueKeysTrackerFactory(loggerMock, fakeUniqueKeysCache, fakeFilter); + uniqueKeysTrackerWithRefresh.start(); + setTimeout(() => { expect(fakeFilter.clear).toBeCalledTimes(1); diff --git a/src/trackers/eventTracker.ts b/src/trackers/eventTracker.ts index cebf3950..335baf8d 100644 --- a/src/trackers/eventTracker.ts +++ 
b/src/trackers/eventTracker.ts @@ -2,7 +2,8 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { thenable } from '../utils/promise/thenable'; import { IEventsCacheBase, ITelemetryCacheAsync, ITelemetryCacheSync } from '../storages/types'; import { IEventsHandler, IEventTracker } from './types'; -import { ISettings, SplitIO } from '../types'; +import { ISettings } from '../types'; +import SplitIO from '../../types/splitio'; import { EVENTS_TRACKER_SUCCESS, ERROR_EVENTS_TRACKER } from '../logger/constants'; import { CONSENT_DECLINED, DROPPED, QUEUED } from '../utils/constants'; import { isConsumerMode } from '../utils/settingsValidation/mode'; @@ -10,12 +11,13 @@ import { isConsumerMode } from '../utils/settingsValidation/mode'; /** * Event tracker stores events in cache and pass them to the integrations manager if provided. * - * @param eventsCache cache to save events - * @param integrationsManager optional event handler used for integrations + * @param eventsCache - cache to save events + * @param integrationsManager - optional event handler used for integrations */ export function eventTrackerFactory( settings: ISettings, eventsCache: IEventsCacheBase, + whenInit: (cb: () => void) => void, integrationsManager?: IEventsHandler, telemetryCache?: ITelemetryCacheSync | ITelemetryCacheAsync ): IEventTracker { @@ -31,14 +33,16 @@ export function eventTrackerFactory( if (tracked) { log.info(EVENTS_TRACKER_SUCCESS, [msg]); if (integrationsManager) { - // Wrap in a timeout because we don't want it to be blocking. 
- setTimeout(function () { - // copy of event, to avoid unexpected behaviour if modified by integrations - const eventDataCopy = objectAssign({}, eventData); - if (properties) eventDataCopy.properties = objectAssign({}, properties); - // integrationsManager does not throw errors (they are internally handled by each integration module) - integrationsManager.handleEvent(eventDataCopy); - }, 0); + whenInit(() => { + // Wrap in a timeout because we don't want it to be blocking. + setTimeout(() => { + // copy of event, to avoid unexpected behaviour if modified by integrations + const eventDataCopy = objectAssign({}, eventData); + if (properties) eventDataCopy.properties = objectAssign({}, properties); + // integrationsManager does not throw errors (they are internally handled by each integration module) + integrationsManager.handleEvent(eventDataCopy); + }); + }); } } else { log.error(ERROR_EVENTS_TRACKER, [msg]); diff --git a/src/trackers/impressionObserver/ImpressionObserver.ts b/src/trackers/impressionObserver/ImpressionObserver.ts index 377a4f08..b9765b47 100644 --- a/src/trackers/impressionObserver/ImpressionObserver.ts +++ b/src/trackers/impressionObserver/ImpressionObserver.ts @@ -1,17 +1,17 @@ -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { LRUCache } from '../../utils/LRUCache'; import { IImpressionObserver } from './types'; export class ImpressionObserver implements IImpressionObserver { private cache: LRUCache; - private hasher: (impression: ImpressionDTO) => K; + private hasher: (impression: SplitIO.ImpressionDTO) => K; - constructor(size: number, hasher: (impression: ImpressionDTO) => K) { + constructor(size: number, hasher: (impression: SplitIO.ImpressionDTO) => K) { this.cache = new LRUCache(size); this.hasher = hasher; } - testAndSet(impression: ImpressionDTO) { + testAndSet(impression: SplitIO.ImpressionDTO) { const hash = this.hasher(impression); const previous = this.cache.get(hash); 
this.cache.set(hash, impression.time); diff --git a/src/trackers/impressionObserver/__tests__/testUtils.ts b/src/trackers/impressionObserver/__tests__/testUtils.ts index a41b9f24..3ede736c 100644 --- a/src/trackers/impressionObserver/__tests__/testUtils.ts +++ b/src/trackers/impressionObserver/__tests__/testUtils.ts @@ -1,6 +1,6 @@ -import { ImpressionDTO } from '../../../types'; +import SplitIO from '../../../../types/splitio'; -export function generateImpressions(count: number): ImpressionDTO[] { +export function generateImpressions(count: number): SplitIO.ImpressionDTO[] { const impressions = []; for (let i = 0; i < count; i++) { impressions.push({ diff --git a/src/trackers/impressionObserver/buildKey.ts b/src/trackers/impressionObserver/buildKey.ts index 8adedb82..8479e4ac 100644 --- a/src/trackers/impressionObserver/buildKey.ts +++ b/src/trackers/impressionObserver/buildKey.ts @@ -1,5 +1,5 @@ -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; -export function buildKey(impression: ImpressionDTO) { +export function buildKey(impression: SplitIO.ImpressionDTO) { return `${impression.keyName}:${impression.feature}:${impression.treatment}:${impression.label}:${impression.changeNumber}`; } diff --git a/src/trackers/impressionObserver/impressionObserverCS.ts b/src/trackers/impressionObserver/impressionObserverCS.ts index 712d8738..de69455c 100644 --- a/src/trackers/impressionObserver/impressionObserverCS.ts +++ b/src/trackers/impressionObserver/impressionObserverCS.ts @@ -1,9 +1,9 @@ import { ImpressionObserver } from './ImpressionObserver'; import { hash } from '../../utils/murmur3/murmur3'; import { buildKey } from './buildKey'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; -export function hashImpression32(impression: ImpressionDTO) { +export function hashImpression32(impression: SplitIO.ImpressionDTO) { return hash(buildKey(impression)); } diff --git 
a/src/trackers/impressionObserver/impressionObserverSS.ts b/src/trackers/impressionObserver/impressionObserverSS.ts index 7a81279f..23efeec4 100644 --- a/src/trackers/impressionObserver/impressionObserverSS.ts +++ b/src/trackers/impressionObserver/impressionObserverSS.ts @@ -1,9 +1,9 @@ import { ImpressionObserver } from './ImpressionObserver'; import { hash128 } from '../../utils/murmur3/murmur3_128_x86'; import { buildKey } from './buildKey'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; -export function hashImpression128(impression: ImpressionDTO) { +export function hashImpression128(impression: SplitIO.ImpressionDTO) { return hash128(buildKey(impression)); } diff --git a/src/trackers/impressionObserver/types.ts b/src/trackers/impressionObserver/types.ts index 16aeafb3..8c4f2a33 100644 --- a/src/trackers/impressionObserver/types.ts +++ b/src/trackers/impressionObserver/types.ts @@ -1,5 +1,5 @@ -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; export interface IImpressionObserver { - testAndSet(impression: ImpressionDTO): number | undefined + testAndSet(impression: SplitIO.ImpressionDTO): number | undefined } diff --git a/src/trackers/impressionsTracker.ts b/src/trackers/impressionsTracker.ts index 77b5f1f1..485d0694 100644 --- a/src/trackers/impressionsTracker.ts +++ b/src/trackers/impressionsTracker.ts @@ -2,23 +2,25 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { thenable } from '../utils/promise/thenable'; import { IImpressionsCacheBase, ITelemetryCacheSync, ITelemetryCacheAsync } from '../storages/types'; import { IImpressionsHandler, IImpressionsTracker, IStrategy } from './types'; -import { SplitIO, ImpressionDTO, ISettings } from '../types'; +import { ISettings } from '../types'; import { IMPRESSIONS_TRACKER_SUCCESS, ERROR_IMPRESSIONS_TRACKER, ERROR_IMPRESSIONS_LISTENER } from '../logger/constants'; import { CONSENT_DECLINED, DEDUPED, QUEUED } 
from '../utils/constants'; +import SplitIO from '../../types/splitio'; /** * Impressions tracker stores impressions in cache and pass them to the listener and integrations manager if provided. * - * @param impressionsCache cache to save impressions - * @param metadata runtime metadata (ip, hostname and version) - * @param impressionListener optional impression listener - * @param integrationsManager optional integrations manager - * @param strategy strategy for impressions tracking. + * @param impressionsCache - cache to save impressions + * @param metadata - runtime metadata (ip, hostname and version) + * @param impressionListener - optional impression listener + * @param integrationsManager - optional integrations manager + * @param strategy - strategy for impressions tracking. */ export function impressionsTrackerFactory( settings: ISettings, impressionsCache: IImpressionsCacheBase, strategy: IStrategy, + whenInit: (cb: () => void) => void, integrationsManager?: IImpressionsHandler, telemetryCache?: ITelemetryCacheSync | ITelemetryCacheAsync, ): IImpressionsTracker { @@ -26,7 +28,7 @@ export function impressionsTrackerFactory( const { log, impressionListener, runtime: { ip, hostname }, version } = settings; return { - track(impressions: ImpressionDTO[], attributes?: SplitIO.Attributes) { + track(impressions: SplitIO.ImpressionDTO[], attributes?: SplitIO.Attributes) { if (settings.userConsent === CONSENT_DECLINED) return; const impressionsCount = impressions.length; @@ -66,17 +68,19 @@ export function impressionsTrackerFactory( sdkLanguageVersion: version }; - // Wrap in a timeout because we don't want it to be blocking. - setTimeout(function () { - // integrationsManager.handleImpression does not throw errors - if (integrationsManager) integrationsManager.handleImpression(impressionData); + whenInit(() => { + // Wrap in a timeout because we don't want it to be blocking. 
+ setTimeout(() => { + // integrationsManager.handleImpression does not throw errors + if (integrationsManager) integrationsManager.handleImpression(impressionData); - try { // @ts-ignore. An exception on the listeners should not break the SDK. - if (impressionListener) impressionListener.logImpression(impressionData); - } catch (err) { - log.error(ERROR_IMPRESSIONS_LISTENER, [err]); - } - }, 0); + try { // @ts-ignore. An exception on the listeners should not break the SDK. + if (impressionListener) impressionListener.logImpression(impressionData); + } catch (err) { + log.error(ERROR_IMPRESSIONS_LISTENER, [err]); + } + }); + }); } } } diff --git a/src/trackers/strategy/__tests__/testUtils.ts b/src/trackers/strategy/__tests__/testUtils.ts index 121ec69c..9c8fcd68 100644 --- a/src/trackers/strategy/__tests__/testUtils.ts +++ b/src/trackers/strategy/__tests__/testUtils.ts @@ -1,4 +1,4 @@ -import { ImpressionDTO } from '../../../types'; +import SplitIO from '../../../../types/splitio'; export const impression1 = { feature: 'qc_team', @@ -7,7 +7,7 @@ export const impression1 = { time: Date.now(), bucketingKey: 'impr_bucketing_2', label: 'default rule' -} as ImpressionDTO; +} as SplitIO.ImpressionDTO; export const impression2 = { feature: 'qc_team_2', keyName: 'emma@split.io', @@ -15,4 +15,4 @@ export const impression2 = { time: Date.now(), bucketingKey: 'impr_bucketing_2', label: 'default rule' -} as ImpressionDTO; +} as SplitIO.ImpressionDTO; diff --git a/src/trackers/strategy/strategyDebug.ts b/src/trackers/strategy/strategyDebug.ts index c6d29e8d..21ca3a5d 100644 --- a/src/trackers/strategy/strategyDebug.ts +++ b/src/trackers/strategy/strategyDebug.ts @@ -1,11 +1,11 @@ -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IImpressionObserver } from '../impressionObserver/types'; import { IStrategy } from '../types'; /** * Debug strategy for impressions tracker. 
Wraps impressions to store and adds previousTime if it corresponds * - * @param impressionsObserver impression observer. Previous time (pt property) is included in impression instances + * @param impressionsObserver - impression observer. Previous time (pt property) is included in impression instances * @returns IStrategyResult */ export function strategyDebugFactory( @@ -13,7 +13,7 @@ export function strategyDebugFactory( ): IStrategy { return { - process(impressions: ImpressionDTO[]) { + process(impressions: SplitIO.ImpressionDTO[]) { impressions.forEach((impression) => { // Adds previous time if it is enabled impression.pt = impressionsObserver.testAndSet(impression); diff --git a/src/trackers/strategy/strategyNone.ts b/src/trackers/strategy/strategyNone.ts index 0a2e75ef..452ae594 100644 --- a/src/trackers/strategy/strategyNone.ts +++ b/src/trackers/strategy/strategyNone.ts @@ -1,12 +1,12 @@ import { IImpressionCountsCacheBase } from '../../storages/types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { IStrategy, IUniqueKeysTracker } from '../types'; /** * None strategy for impressions tracker. * - * @param impressionsCounter cache to save impressions count. impressions will be deduped (OPTIMIZED mode) - * @param uniqueKeysTracker unique keys tracker in charge of tracking the unique keys per split. + * @param impressionsCounter - cache to save impressions count. impressions will be deduped (OPTIMIZED mode) + * @param uniqueKeysTracker - unique keys tracker in charge of tracking the unique keys per split. 
* @returns IStrategyResult */ export function strategyNoneFactory( @@ -15,7 +15,7 @@ export function strategyNoneFactory( ): IStrategy { return { - process(impressions: ImpressionDTO[]) { + process(impressions: SplitIO.ImpressionDTO[]) { impressions.forEach((impression) => { const now = Date.now(); // Increments impression counter per featureName diff --git a/src/trackers/strategy/strategyOptimized.ts b/src/trackers/strategy/strategyOptimized.ts index ce1a9857..9fe61af1 100644 --- a/src/trackers/strategy/strategyOptimized.ts +++ b/src/trackers/strategy/strategyOptimized.ts @@ -1,5 +1,5 @@ import { IImpressionCountsCacheBase } from '../../storages/types'; -import { ImpressionDTO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { truncateTimeFrame } from '../../utils/time'; import { IImpressionObserver } from '../impressionObserver/types'; import { IStrategy } from '../types'; @@ -7,8 +7,8 @@ import { IStrategy } from '../types'; /** * Optimized strategy for impressions tracker. Wraps impressions to store and adds previousTime if it corresponds * - * @param impressionsObserver impression observer. previous time (pt property) is included in impression instances - * @param impressionsCounter cache to save impressions count. impressions will be deduped (OPTIMIZED mode) + * @param impressionsObserver - impression observer. previous time (pt property) is included in impression instances + * @param impressionsCounter - cache to save impressions count. 
impressions will be deduped (OPTIMIZED mode) * @returns IStrategyResult */ export function strategyOptimizedFactory( @@ -17,8 +17,8 @@ export function strategyOptimizedFactory( ): IStrategy { return { - process(impressions: ImpressionDTO[]) { - const impressionsToStore: ImpressionDTO[] = []; + process(impressions: SplitIO.ImpressionDTO[]) { + const impressionsToStore: SplitIO.ImpressionDTO[] = []; impressions.forEach((impression) => { impression.pt = impressionsObserver.testAndSet(impression); diff --git a/src/trackers/types.ts b/src/trackers/types.ts index 041c9b97..db6d5bcb 100644 --- a/src/trackers/types.ts +++ b/src/trackers/types.ts @@ -1,4 +1,4 @@ -import { SplitIO, ImpressionDTO } from '../types'; +import SplitIO from '../../types/splitio'; import { StreamingEventType, Method, OperationType, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { IEventsCacheBase } from '../storages/types'; import { NetworkError } from '../services/types'; @@ -18,7 +18,7 @@ export interface IImpressionsHandler { } export interface IImpressionsTracker { - track(impressions: ImpressionDTO[], attributes?: SplitIO.Attributes): void + track(impressions: SplitIO.ImpressionDTO[], attributes?: SplitIO.Attributes): void } /** Telemetry tracker */ @@ -65,16 +65,17 @@ export interface IImpressionSenderAdapter { /** Unique keys tracker */ export interface IUniqueKeysTracker { + start(): void; stop(): void; track(key: string, featureName: string): void; } export interface IStrategyResult { - impressionsToStore: ImpressionDTO[], - impressionsToListener: ImpressionDTO[], + impressionsToStore: SplitIO.ImpressionDTO[], + impressionsToListener: SplitIO.ImpressionDTO[], deduped: number } export interface IStrategy { - process(impressions: ImpressionDTO[]): IStrategyResult + process(impressions: SplitIO.ImpressionDTO[]): IStrategyResult } diff --git a/src/trackers/uniqueKeysTracker.ts b/src/trackers/uniqueKeysTracker.ts index fe367c79..f8de517c 100644 --- 
a/src/trackers/uniqueKeysTracker.ts +++ b/src/trackers/uniqueKeysTracker.ts @@ -14,9 +14,9 @@ const noopFilterAdapter = { * Unique Keys Tracker will be in charge of checking if the MTK was already sent to the BE in the last period * or schedule to be sent; if not it will be added in an internal cache and sent in the next post. * - * @param log Logger instance - * @param uniqueKeysCache cache to save unique keys - * @param filterAdapter filter adapter + * @param log - Logger instance + * @param uniqueKeysCache - cache to save unique keys + * @param filterAdapter - filter adapter */ export function uniqueKeysTrackerFactory( log: ILogger, @@ -25,10 +25,6 @@ export function uniqueKeysTrackerFactory( ): IUniqueKeysTracker { let intervalId: any; - if (filterAdapter.refreshRate) { - intervalId = setInterval(filterAdapter.clear, filterAdapter.refreshRate); - } - return { track(key: string, featureName: string): void { @@ -39,6 +35,12 @@ export function uniqueKeysTrackerFactory( uniqueKeysCache.track(key, featureName); }, + start(): void { + if (filterAdapter.refreshRate) { + intervalId = setInterval(filterAdapter.clear, filterAdapter.refreshRate); + } + }, + stop(): void { clearInterval(intervalId); } diff --git a/src/types.ts b/src/types.ts index ac061f28..bdb0933c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,415 +1,21 @@ +import SplitIO from '../types/splitio'; import { ISplitFiltersValidation } from './dtos/types'; -import { IIntegration, IIntegrationFactoryParams } from './integrations/types'; import { ILogger } from './logger/types'; -import { ISdkFactoryContext } from './sdkFactory/types'; -/* eslint-disable no-use-before-define */ - -import { IStorageFactoryParams, IStorageSync, IStorageAsync, IStorageSyncFactory, IStorageAsyncFactory } from './storages/types'; -import { ISyncManagerCS } from './sync/types'; - -/** - * Reduced version of NodeJS.EventEmitter interface with the minimal methods used by the SDK - * @see {@link https://nodejs.org/api/events.html} - 
*/ -export interface IEventEmitter { - addListener(event: string, listener: (...args: any[]) => void): this; - on(event: string, listener: (...args: any[]) => void): this - once(event: string, listener: (...args: any[]) => void): this - removeListener(event: string, listener: (...args: any[]) => void): this; - off(event: string, listener: (...args: any[]) => void): this; - removeAllListeners(event?: string): this - emit(event: string, ...args: any[]): boolean -} /** - * impression DTO generated by the Sdk client when processing evaluations + * SplitIO.ISettings interface extended with private properties for internal use */ -export type ImpressionDTO = { - feature: string, - keyName: string, - treatment: string, - time: number, - bucketingKey?: string, - label: string, - changeNumber: number, - pt?: number, +export interface ISettings extends SplitIO.ISettings { + readonly sync: SplitIO.ISettings['sync'] & { + __splitFiltersValidation: ISplitFiltersValidation; + }; + readonly log: ILogger; } -/** splitio.d.ts */ - -/** - * @typedef {Object} EventConsts - * @property {string} SDK_READY The ready event. - * @property {string} SDK_READY_FROM_CACHE The ready event when fired with cached data. - * @property {string} SDK_READY_TIMED_OUT The timeout event. - * @property {string} SDK_UPDATE The update event. - */ -type EventConsts = { - SDK_READY: 'init::ready', - SDK_READY_FROM_CACHE: 'init::cache-ready', - SDK_READY_TIMED_OUT: 'init::timeout', - SDK_UPDATE: 'state::update' -}; -/** - * SDK Modes. - * @typedef {string} SDKMode - */ -export type SDKMode = 'standalone' | 'consumer' | 'localhost' | 'consumer_partial'; -/** - * User consent status. - * @typedef {string} ConsentStatus - */ -export type ConsentStatus = 'GRANTED' | 'DECLINED' | 'UNKNOWN'; -/** - * Settings interface. This is a representation of the settings the SDK expose, that's why - * most of it's props are readonly. Only features should be rewritten when localhost mode is active. 
- * @interface ISettings - * - * NOTE: same ISettings interface from public type declarations extended with private properties. - */ -export interface ISettings { - readonly core: { - authorizationKey: string, - key: SplitIO.SplitKey, - trafficType?: string, - labelsEnabled: boolean, - IPAddressesEnabled: boolean - }, - readonly mode: SDKMode, - readonly scheduler: { - featuresRefreshRate: number, - impressionsRefreshRate: number, - impressionsQueueSize: number, - /** - * @deprecated - */ - metricsRefreshRate?: number, - telemetryRefreshRate: number, - segmentsRefreshRate: number, - offlineRefreshRate: number, - eventsPushRate: number, - eventsQueueSize: number, - pushRetryBackoffBase: number - }, - readonly startup: { - readyTimeout: number, - requestTimeoutBeforeReady: number, - retriesOnFailureBeforeReady: number, - eventsFirstPushWindow: number - }, - readonly storage: IStorageSyncFactory | IStorageAsyncFactory, - readonly integrations: Array<{ - readonly type: string, - (params: IIntegrationFactoryParams): IIntegration | void - }>, - readonly urls: { - events: string, - sdk: string, - auth: string, - streaming: string, - telemetry: string - }, - readonly debug: boolean | LogLevel | ILogger, - readonly version: string, - features: SplitIO.MockedFeaturesFilePath | SplitIO.MockedFeaturesMap, - readonly streamingEnabled: boolean, - readonly sync: { - splitFilters: SplitIO.SplitFilter[], - impressionsMode: SplitIO.ImpressionsMode, - __splitFiltersValidation: ISplitFiltersValidation, - localhostMode?: SplitIO.LocalhostFactory, - enabled: boolean, - flagSpecVersion: string, - requestOptions?: { - getHeaderOverrides?: (context: { headers: Record }) => Record - } - }, - readonly runtime: { - ip: string | false - hostname: string | false - }, - readonly log: ILogger - readonly impressionListener?: unknown - readonly userConsent?: ConsentStatus -} -/** - * Log levels. 
- * @typedef {string} LogLevel - */ -export type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'; -/** - * Logger API - * @interface ILoggerAPI - */ -export interface ILoggerAPI { - /** - * Enables SDK logging to the console. - * @function enable - * @returns {void} - */ - enable(): void, - /** - * Disables SDK logging. - * @function disable - * @returns {void} - */ - disable(): void, - /** - * Sets a log level for the SDK logs. - * @function setLogLevel - * @returns {void} - */ - setLogLevel(logLevel: LogLevel): void, - /** - * Log level constants. Use this to pass them to setLogLevel function. - */ - LogLevel: { - [level: string]: LogLevel - } -} -/** - * Common settings between Browser and NodeJS settings interface. - * @interface ISharedSettings - */ -interface ISharedSettings { - /** - * Whether the logger should be enabled or disabled by default. - * @property {Boolean} debug - * @default false - */ - debug?: boolean, - /** - * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, - * which will check for the logImpression method. - * @property {IImpressionListener} impressionListener - * @default undefined - */ - impressionListener?: SplitIO.IImpressionListener, - /** - * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, - * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. - * @property {boolean} streamingEnabled - * @default true - */ - streamingEnabled?: boolean, - /** - * SDK synchronization settings. - * @property {Object} sync - */ - sync?: { - /** - * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. 
- * This configuration is only meaningful when the SDK is working in "standalone" mode. - * - * Example: - * `splitFilter: [ - * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' - * ]` - * @property {SplitIO.SplitFilter[]} splitFilters - */ - splitFilters?: SplitIO.SplitFilter[] - /** - * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. - * Possible values are 'DEBUG' and 'OPTIMIZED'. - * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). - * - OPTIMIZED: will send unique impressions to Split servers avoiding a considerable amount of traffic that duplicated impressions could generate. - * @property {String} impressionsMode - * @default 'OPTIMIZED' - */ - impressionsMode?: SplitIO.ImpressionsMode, - /** - * Enables synchronization. - * @property {boolean} enabled - */ - enabled?: boolean, - requestOptions?: { - getHeaderOverrides?: (context: { headers: Record }) => Record - }, - } -} /** - * Common settings interface for SDK instances on NodeJS. - * @interface INodeBasicSettings - * @extends ISharedSettings + * SplitIO.IStatusInterface interface extended with private properties for internal use */ -interface INodeBasicSettings extends ISharedSettings { - /** - * SDK Startup settings for NodeJS. - * @property {Object} startup - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 15 - */ - readyTimeout?: number, - /** - * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * @property {number} requestTimeoutBeforeReady - * @default 15 - */ - requestTimeoutBeforeReady?: number, - /** - * How many quick retries we will do while starting up the SDK. 
- * @property {number} retriesOnFailureBeforeReady - * @default 1 - */ - retriesOnFailureBeforeReady?: number, - /** - * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. - * - * @property {number} eventsFirstPushWindow - * @default 0 - */ - eventsFirstPushWindow?: number, - }, - /** - * SDK scheduler settings. - * @property {Object} scheduler - */ - scheduler?: { - /** - * The SDK polls Split servers for changes to feature roll-out plans. This parameter controls this polling period in seconds. - * @property {number} featuresRefreshRate - * @default 5 - */ - featuresRefreshRate?: number, - /** - * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * @property {number} impressionsRefreshRate - * @default 300 - */ - impressionsRefreshRate?: number, - /** - * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} impressionsQueueSize - * @default 30000 - */ - impressionsQueueSize?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} metricsRefreshRate - * @default 120 - * @deprecated This parameter is ignored now. - */ - metricsRefreshRate?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} telemetryRefreshRate - * @default 3600 - */ - telemetryRefreshRate?: number, - /** - * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. 
- * @property {number} segmentsRefreshRate - * @default 60 - */ - segmentsRefreshRate?: number, - /** - * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * @property {number} eventsPushRate - * @default 60 - */ - eventsPushRate?: number, - /** - * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} eventsQueueSize - * @default 500 - */ - eventsQueueSize?: number, - /** - * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {number} offlineRefreshRate - * @default 15 - */ - offlineRefreshRate?: number - /** - * When using streaming mode, seconds to wait before re attempting to connect for push notifications. - * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * @property {number} pushRetryBackoffBase - * @default 1 - */ - pushRetryBackoffBase?: number, - }, - /** - * SDK Core settings for NodeJS. - * @property {Object} core - */ - core: { - /** - * Your SDK key. More information: @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string, - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean - /** - * Disable machine IP and Name from being sent to Split backend. - * @property {boolean} IPAddressesEnabled - * @default true - */ - IPAddressesEnabled?: boolean - }, - /** - * Defines which kind of storage we should instantiate. 
- * @property {Object} storage - */ - storage?: (params: any) => any, - /** - * The SDK mode. Possible values are "standalone" (which is the default) and "consumer". For "localhost" mode, use "localhost" as authorizationKey. - * @property {SDKMode} mode - * @default 'standalone' - */ - mode?: SDKMode, - /** - * Mocked features file path. For testing purposses only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {MockedFeaturesFilePath} features - * @default '$HOME/.split' - */ - features?: SplitIO.MockedFeaturesFilePath, -} -/** - * Common API for entities that expose status handlers. - * @interface IStatusInterface - * @extends IEventEmitter - */ -export interface IStatusInterface extends IEventEmitter { - /** - * Constant object containing the SDK events for you to use. - * @property {EventConsts} Event - */ - Event: EventConsts, - /** - * Returns a promise that will be resolved once the SDK has finished loading (SDK_READY event emitted) or rejected if the SDK has timedout (SDK_READY_TIMED_OUT event emitted). - * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, calling the `ready` method after the - * SDK had timed out will return a new promise that should eventually resolve if the SDK gets ready. - * - * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises. 
- * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example: - * ``` - * try { - * await client.ready().catch((e) => { throw e; }); - * // SDK is ready - * } catch(e) { - * // SDK has timedout - * } - * ``` - * - * @function ready - * @returns {Promise} - */ - ready(): Promise - +export interface IStatusInterface extends SplitIO.IStatusInterface { // Expose status for internal purposes only. Not considered part of the public API, and might be updated eventually. __getStatus(): { isReady: boolean; @@ -419,992 +25,55 @@ export interface IStatusInterface extends IEventEmitter { isDestroyed: boolean; isOperational: boolean; lastUpdate: number; - } + }; } /** - * Common definitions between clients for different environments interface. - * @interface IBasicClient - * @extends IStatusInterface + * SplitIO.IBasicClient interface extended with private properties for internal use */ -interface IBasicClient extends IStatusInterface { +export interface IBasicClient extends SplitIO.IBasicClient { /** * Flush data - * @function flush - * @return {Promise} - */ - flush(): Promise - /** - * Destroy the client instance. - * @function destroy - * @returns {Promise} + * + * @returns A promise that is resolved when the flush is completed. */ - destroy(): Promise - - // Whether the client implements the client-side API, i.e, with bound key, (true), or the server-side API (false). + flush(): Promise; + // Whether the client implements the client-side API, i.e, with bound key, (true), or the server-side API (false/undefined). // Exposed for internal purposes only. Not considered part of the public API, and might be renamed eventually. - isClientSide: boolean + isClientSide?: boolean; + key?: SplitIO.SplitKey; } /** - * Common definitions between SDK instances for different environments interface. - * @interface IBasicSDK + * Defines the format of rollout plan data to preload the factory storage (cache). 
*/ -interface IBasicSDK { +export interface PreloadedData { /** - * Current settings of the SDK instance. - * @property settings + * Timestamp of the last moment the data was synchronized with Split servers. + * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. */ - settings: ISettings, + // @TODO configurable expiration time policy? + lastUpdated: number; /** - * Logger API. - * @property Logger + * Change number of the preloaded data. + * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. */ - Logger: ILoggerAPI -} -/****** Exposed namespace ******/ -/** - * Types and interfaces for @splitsoftware/splitio package for usage when integrating javascript sdk on typescript apps. - * For the SDK package information - * @see {@link https://www.npmjs.com/package/@splitsoftware/splitio} - */ -export namespace SplitIO { + since: number; /** - * Feature flag treatment value, returned by getTreatment. - * @typedef {string} Treatment + * Map of feature flags to their stringified definitions. */ - export type Treatment = string; - /** - * Feature flag treatment promise that will resolve to actual treatment value. - * @typedef {Promise} AsyncTreatment - */ - export type AsyncTreatment = Promise; - /** - * An object with the treatments for a bulk of feature flags, returned by getTreatments. For example: - * { - * feature1: 'on', - * feature2: 'off - * } - * @typedef {Object.} Treatments - */ - export type Treatments = { - [featureName: string]: Treatment + splitsData: { + [splitName: string]: string; }; /** - * Feature flag treatments promise that will resolve to the actual SplitIO.Treatments object. - * @typedef {Promise} AsyncTreatments + * Optional map of user keys to their list of segments. 
*/ - export type AsyncTreatments = Promise; - /** - * Feature flag evaluation result with treatment and configuration, returned by getTreatmentWithConfig. - * @typedef {Object} TreatmentWithConfig - * @property {string} treatment The treatment string - * @property {string | null} config The stringified version of the JSON config defined for that treatment, null if there is no config for the resulting treatment. - */ - export type TreatmentWithConfig = { - treatment: string, - config: string | null + mySegmentsData?: { + [key: string]: string[]; }; /** - * Feature flag treatment promise that will resolve to actual treatment with config value. - * @typedef {Promise} AsyncTreatmentWithConfig - */ - export type AsyncTreatmentWithConfig = Promise; - /** - * An object with the treatments with configs for a bulk of feature flags, returned by getTreatmentsWithConfig. - * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. For example: - * { - * feature1: { treatment: 'on', config: null } - * feature2: { treatment: 'off', config: '{"bannerText":"Click here."}' } - * } - * @typedef {Object.} Treatments + * Optional map of segments to their stringified definitions. + * This property is ignored if `mySegmentsData` was provided. */ - export type TreatmentsWithConfig = { - [featureName: string]: TreatmentWithConfig + segmentsData?: { + [segmentName: string]: string; }; - /** - * Feature flag treatments promise that will resolve to the actual SplitIO.TreatmentsWithConfig object. - * @typedef {Promise} AsyncTreatmentsWithConfig - */ - export type AsyncTreatmentsWithConfig = Promise; - /** - * Possible Split SDK events. - * @typedef {string} Event - */ - export type Event = 'init::timeout' | 'init::ready' | 'init::cache-ready' | 'state::update'; - /** - * Attributes should be on object with values of type string or number (dates should be sent as millis since epoch). 
- * @typedef {Object.} Attributes - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#attribute-syntax} - */ - export type Attributes = { - [attributeName: string]: AttributeType - }; - /** - * Type of an attribute value - * @typedef {string | number | boolean | Array} AttributeType - */ - export type AttributeType = string | number | boolean | Array; - /** - * Properties should be an object with values of type string, number, boolean or null. Size limit of ~31kb. - * @typedef {Object.} Attributes - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#track - */ - export type Properties = { - [propertyName: string]: string | number | boolean | null - }; - /** - * The SplitKey object format. - * @typedef {Object.} SplitKeyObject - */ - export type SplitKeyObject = { - matchingKey: string, - bucketingKey: string - }; - /** - * The customer identifier. Could be a SplitKeyObject or a string. - * @typedef {SplitKeyObject|string} SplitKey - */ - export type SplitKey = SplitKeyObject | string; - /** - * Path to file with mocked features (for node). - * @typedef {string} MockedFeaturesFilePath - */ - export type MockedFeaturesFilePath = string; - /** - * Object with mocked features mapping (for browser). We need to specify the featureName as key, and the mocked treatment as value. - * @typedef {Object} MockedFeaturesMap - */ - export type MockedFeaturesMap = { - [featureName: string]: string | TreatmentWithConfig - }; - /** - * Object with information about an impression. It contains the generated impression DTO as well as - * complementary information around where and how it was generated in that way. - * @typedef {Object} ImpressionData - */ - export type ImpressionData = { - impression: ImpressionDTO, - attributes?: SplitIO.Attributes, - ip: string | false, - hostname: string | false, - sdkLanguageVersion: string - }; - /** - * Data corresponding to one feature flag view. 
- * @typedef {Object} SplitView - */ - export type SplitView = { - /** - * The name of the feature flag. - * @property {string} name - */ - name: string, - /** - * The traffic type of the feature flag. - * @property {string} trafficType - */ - trafficType: string, - /** - * Whether the feature flag is killed or not. - * @property {boolean} killed - */ - killed: boolean, - /** - * The list of treatments available for the feature flag. - * @property {Array} treatments - */ - treatments: Array, - /** - * Current change number of the feature flag. - * @property {number} changeNumber - */ - changeNumber: number, - /** - * Map of configurations per treatment. - * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. - * @property {Object.} configs - */ - configs: { - [treatmentName: string]: string - }, - /** - * List of sets of the feature flag. - * @property {string[]} sets - */ - sets: string[], - /** - * The default treatment of the feature flag. - * @property {string} defaultTreatment - */ - defaultTreatment: string, - }; - /** - * A promise that resolves to a feature flag view. - * @typedef {Promise} SplitView - */ - export type SplitViewAsync = Promise; - /** - * An array containing the SplitIO.SplitView elements. - */ - export type SplitViews = Array; - /** - * A promise that resolves to an SplitIO.SplitViews array. - * @typedef {Promise} SplitViewsAsync - */ - export type SplitViewsAsync = Promise; - /** - * An array of feature flag names. - * @typedef {Array} SplitNames - */ - export type SplitNames = Array; - /** - * A promise that resolves to an array of feature flag names. - * @typedef {Promise} SplitNamesAsync - */ - export type SplitNamesAsync = Promise; - /** - * Localhost mode factory. - */ - export type LocalhostFactory = { - type: 'LocalhostFromObject' | 'LocalhostFromFile' - (params: ISdkFactoryContext): ISyncManagerCS - } - /** - * Impression listener interface. 
This is the interface that needs to be implemented - * by the element you provide to the SDK as impression listener. - * @interface IImpressionListener - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#listener} - */ - export interface IImpressionListener { - logImpression(data: SplitIO.ImpressionData): void - } - /** - * Object with information about a Split event. - * @typedef {Object} EventData - */ - export type EventData = { - eventTypeId: string; - value?: number; - properties?: Properties; - trafficTypeName?: string; - key?: string; // matching user key - timestamp: number; - }; - /** - * Object representing the data sent by Split (events and impressions). - * @typedef {Object} IntegrationData - * @property {string} type The type of Split data, either 'IMPRESSION' or 'EVENT'. - * @property {ImpressionData | EventData} payload The data instance itself. - */ - export type IntegrationData = { type: 'IMPRESSION', payload: SplitIO.ImpressionData } | { type: 'EVENT', payload: SplitIO.EventData }; - /** - * Available URL settings for the SDKs. - */ - export type UrlSettings = { - /** - * String property to override the base URL where the SDK will get rollout plan related data, like feature flags and segments definitions. - * @property {string} sdk - * @default 'https://sdk.split.io/api' - */ - sdk?: string, - /** - * String property to override the base URL where the SDK will post event-related information like impressions. - * @property {string} events - * @default 'https://events.split.io/api' - */ - events?: string, - /** - * String property to override the base URL where the SDK will get authorization tokens to be used with functionality that requires it, like streaming. - * @property {string} auth - * @default 'https://auth.split.io/api' - */ - auth?: string, - /** - * String property to override the base URL where the SDK will connect to receive streaming updates. 
- * @property {string} streaming - * @default 'https://streaming.split.io' - */ - streaming?: string, - /** - * String property to override the base URL where the SDK will post telemetry data. - * @property {string} telemetry - * @default 'https://telemetry.split.io/api' - */ - telemetry?: string - }; - /** - * SplitFilter type. - * @typedef {string} SplitFilterType - */ - export type SplitFilterType = 'byName' | 'byPrefix' | 'bySet'; - /** - * Defines a feature flag filter, described by a type and list of values. - */ - export interface SplitFilter { - /** - * Type of the filter. - * @property {SplitFilterType} type - */ - type: SplitFilterType, - /** - * List of values: feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. - * @property {string[]} values - */ - values: string[], - } - /** - * ImpressionsMode type - * @typedef {string} ImpressionsMode - */ - export type ImpressionsMode = 'OPTIMIZED' | 'DEBUG' | 'NONE' - /** - * Defines the format of rollout plan data to preload on the factory storage (cache). - */ - export interface PreloadedData { - /** - * Timestamp of the last moment the data was synchronized with Split servers. - * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. - * @TODO configurable expiration time policy? - */ - lastUpdated: number, - /** - * Change number of the preloaded data. - * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. - */ - since: number, - /** - * Map of feature flags to their stringified definitions. - */ - splitsData: { - [splitName: string]: string - }, - /** - * Optional map of user keys to their list of segments. - * @TODO remove when releasing first version - */ - mySegmentsData?: { - [key: string]: string[] - }, - /** - * Optional map of segments to their stringified definitions. 
- * This property is ignored if `mySegmentsData` was provided. - */ - segmentsData?: { - [segmentName: string]: string - }, - } - /** - * Settings interface for SDK instances created on the browser - * @interface IBrowserSettings - * @extends ISharedSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} - */ - export interface IBrowserSettings extends ISharedSettings { - /** - * SDK Startup settings for the Browser. - * @property {Object} startup - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 1.5 - */ - readyTimeout?: number, - /** - * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * @property {number} requestTimeoutBeforeReady - * @default 1.5 - */ - requestTimeoutBeforeReady?: number, - /** - * How many quick retries we will do while starting up the SDK. - * @property {number} retriesOnFailureBeforeReady - * @default 1 - */ - retriesOnFailureBeforeReady?: number, - /** - * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. - * - * @property {number} eventsFirstPushWindow - * @default 10 - */ - eventsFirstPushWindow?: number, - }, - /** - * SDK scheduler settings. - * @property {Object} scheduler - */ - scheduler?: { - /** - * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. - * @property {number} featuresRefreshRate - * @default 60 - */ - featuresRefreshRate?: number, - /** - * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. 
The parameter should be in seconds. - * @property {number} impressionsRefreshRate - * @default 300 - */ - impressionsRefreshRate?: number, - /** - * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} impressionsQueueSize - * @default 30000 - */ - impressionsQueueSize?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} metricsRefreshRate - * @default 120 - * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. - */ - metricsRefreshRate?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} telemetryRefreshRate - * @default 3600 - */ - telemetryRefreshRate?: number, - /** - * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * @property {number} segmentsRefreshRate - * @default 60 - */ - segmentsRefreshRate?: number, - /** - * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * @property {number} eventsPushRate - * @default 60 - */ - eventsPushRate?: number, - /** - * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} eventsQueueSize - * @default 500 - */ - eventsQueueSize?: number, - /** - * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. 
- * For more information @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {number} offlineRefreshRate - * @default 15 - */ - offlineRefreshRate?: number - /** - * When using streaming mode, seconds to wait before re attempting to connect for push notifications. - * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * @property {number} pushRetryBackoffBase - * @default 1 - */ - pushRetryBackoffBase?: number, - }, - /** - * SDK Core settings for the browser. - * @property {Object} core - */ - core: { - /** - * Your SDK key. More information: @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string, - /** - * Customer identifier. Whatever this means to you. @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @property {SplitKey} key - */ - key: SplitKey, - /** - * Traffic type associated with the customer identifier. @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * If no provided as a setting it will be required on the client.track() calls. - * @property {string} trafficType - */ - trafficType?: string, - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean - }, - /** - * Mocked features map. For testing purposses only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - */ - features?: MockedFeaturesMap, - /** - * Defines which kind of storage we should instantiate. 
- * @property {Object} storage - */ - storage?: (params: IStorageFactoryParams) => IStorageSync | IStorageAsync, - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls - */ - urls?: UrlSettings, - } - /** - * Settings interface for SDK instances created on NodeJS. - * If your storage is asynchronous (Redis for example) use SplitIO.INodeAsyncSettings instead. - * @interface INodeSettings - * @extends INodeBasicSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} - */ - export interface INodeSettings extends INodeBasicSettings { - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls - */ - urls?: UrlSettings, - /** - * Defines which kind of storage we should instantiate. - * @property {Object} storage - */ - storage?: (params: IStorageFactoryParams) => IStorageSync, - } - /** - * Settings interface with async storage for SDK instances created on NodeJS. - * If your storage is synchronous (by defaut we use memory, which is sync) use SplitIO.INodeSyncSettings instead. - * @interface INodeAsyncSettings - * @extends INodeBasicSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} - */ - export interface INodeAsyncSettings extends INodeBasicSettings { - storage?: (params: IStorageFactoryParams) => IStorageAsync, - } - /** - * This represents the interface for the Server-side SDK instance with synchronous storage. 
- * @interface ISDK - * @extends IBasicSDK - */ - export interface ISDK extends IBasicSDK { - /** - * Returns the client instance of the SDK. - * @function client - * @returns {IClient} The client instance. - */ - client(): IClient, - /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. - */ - manager(): IManager - } - /** - * This represents the interface for the Server-side SDK instance with asynchronous storage. - * @interface IAsyncSDK - * @extends IBasicSDK - */ - export interface IAsyncSDK extends IBasicSDK { - /** - * Returns the default client instance of the SDK. - * @function client - * @returns {IAsyncClient} The asynchronous client instance. - */ - client(): IAsyncClient, - /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. - */ - manager(): IAsyncManager - } - /** - * This represents the interface for the Client-side SDK instance with synchronous storage. - * @interface ICsSDK - * @extends IBasicSDK - */ - export interface ICsSDK extends IBasicSDK { - /** - * Returns the default client instance of the SDK, with the key and optional traffic type from settings. - * @function client - * @returns {ICsClient} The client instance. - */ - client(): ICsClient, - /** - * Returns a shared client of the SDK, with the given key and optional traffic type. - * @function client - * @param {SplitKey} key The key for the new client instance. - * @param {string=} trafficType The traffic type of the provided key. - * @returns {ICsClient} The client instance. - */ - client(key: SplitKey, trafficType?: string): ICsClient, - /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. 
- */ - manager(): IManager - } - /** - * This represents the interface for the Client instance with synchronous storage for server-side SDK, where we don't have only one key. - * @interface IClient - * @extends IBasicClient - */ - export interface IClient extends IBasicClient { - /** - * Returns a Treatment value, which is the treatment string for the given feature. - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. - */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment, - /** - * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the - * configuration stringified JSON (or null if there was no config for that treatment). - */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the given features. - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. 
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. - */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flagSet. - * @function getTreatmentsByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flagSet name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flagSets. - * @function getTreatmentsWithConfigByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flagSet name we want to get the treatments. 
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flagSets. - * @function getTreatmentsByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flagSet names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flagSets. - * @function getTreatmentsWithConfigByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flagSet names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Tracks an event to be fed to the results product on Split user interface. - * @function track - * @param {SplitKey} key - The key that identifies the entity related to this event. - * @param {string} trafficType - The traffic type of the entity related to this event. 
- * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} Whether the event was added to the queue successfully or not. - */ - track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean, - } - /** - * This represents the interface for the Client instance with asynchronous storage for server-side SDK, where we don't have only one key. - * @interface IAsyncClient - * @extends IBasicClient - */ - export interface IAsyncClient extends IBasicClient { - /** - * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. - */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment, - /** - * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. 
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. - */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig, - /** - * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. - * For usage on NodeJS as we don't have only one key. - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. - */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments, - /** - * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. 
- */ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig, - /** - * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the features related to the given flag set. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments, - /** - * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for features related to the given flag set. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsWithConfigByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig, - /** - * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the feature flags related to the given flag sets. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. 
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments, - /** - * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the feature flags related to the given flag sets. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsWithConfigByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig, - /** - * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). - * @function track - * @param {SplitKey} key - The key that identifies the entity related to this event. - * @param {string} trafficType - The traffic type of the entity related to this event. - * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {Promise} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. 
- */ - track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise - } - /** - * This represents the interface for the Client instance with synchronous storage for client-side SDK, where each client has associated a key and optionally a traffic type. - * @interface IClient - * @extends IBasicClient - */ - export interface ICsClient extends IBasicClient { - /** - * Returns a Treatment value, which is the treatment string for the given feature. - * @function getTreatment - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. - */ - getTreatment(featureFlagName: string, attributes?: Attributes): Treatment, - /** - * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. - * @function getTreatmentWithConfig - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentWithConfig} The map containing the treatment and the configuration stringified JSON (or null if there was no config for that treatment). - */ - getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the given features. - * @function getTreatments - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. 
- */ - getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * @function getTreatmentsWithConfig - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. - * @function getTreatmentsByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. - * @function getTreatmentsWithConfigByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
- * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. - * @function getTreatmentsByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. - * @function getTreatmentsWithConfigByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Tracks an event to be fed to the results product on Split user interface. - * @function track - * @param {string} trafficType - The traffic type of the entity related to this event. - * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} Whether the event was added to the queue successfully or not. 
- */ - track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean, - /** - * Add an attribute to client's in memory attributes storage - * - * @param {string} attributeName Attribute name - * @param {AttributeType} attributeValue Attribute value - * @returns {boolean} true if the attribute was stored and false otherwise - */ - setAttribute(attributeName: string, attributeValue: AttributeType): boolean, - /** - * Returns the attribute with the given name - * - * @param {string} attributeName Attribute name - * @returns {AttributeType} Attribute with the given name - */ - getAttribute(attributeName: string): AttributeType, - /** - * Removes from client's in memory attributes storage the attribute with the given name. - * - * @param {string} attributeName - * @returns {boolean} true if attribute was removed and false otherwise - */ - removeAttribute(attributeName: string): boolean, - /** - * Add to client's in memory attributes storage the attributes in 'attributes'. - * - * @param {Attributes} attributes Object with attributes to store - * @returns true if attributes were stored an false otherwise - */ - setAttributes(attributes: Attributes): boolean, - /** - * Return all the attributes stored in client's in memory attributes storage. - * - * @returns {Attributes} returns all the stored attributes - */ - getAttributes(): Attributes, - /** - * Remove all the stored attributes in the client's in memory attribute storage. - * - * @returns {boolean} true if all attribute were removed and false otherwise - */ - clearAttributes(): boolean - } - /** - * Representation of a manager instance with synchronous storage of the SDK. - * @interface IManager - * @extends IStatusInterface - */ - export interface IManager extends IStatusInterface { - /** - * Get the array of feature flag names. - * @function names - * @returns {SplitNames} The list of feature flag names. 
- */ - names(): SplitNames, - /** - * Get the array of feature flags data in SplitView format. - * @function splits - * @returns {SplitViews} The list of SplitIO.SplitView. - */ - splits(): SplitViews, - /** - * Get the data of a split in SplitView format. - * @function split - * @param {string} featureFlagName The name of the feature flag we want to get info of. - * @returns {SplitView} The SplitIO.SplitView of the given split. - */ - split(featureFlagName: string): SplitView, - } - /** - * Representation of a manager instance with asynchronous storage of the SDK. - * @interface IAsyncManager - * @extends IStatusInterface - */ - export interface IAsyncManager extends IStatusInterface { - /** - * Get the array of feature flag names. - * @function names - * @returns {SplitNamesAsync} A promise that resolves to the list of feature flag names. - */ - names(): SplitNamesAsync, - /** - * Get the array of feature flags data in SplitView format. - * @function splits - * @returns {SplitViewsAsync} A promise that resolves to the SplitIO.SplitView list. - */ - splits(): SplitViewsAsync, - /** - * Get the data of a split in SplitView format. - * @function split - * @param {string} featureFlagName The name of the feature flag we want to get info of. - * @returns {SplitViewAsync} A promise that resolves to the SplitIO.SplitView value. 
- */ - split(featureFlagName: string): SplitViewAsync, - } } diff --git a/src/utils/Backoff.ts b/src/utils/Backoff.ts index 95ff5c34..519b4890 100644 --- a/src/utils/Backoff.ts +++ b/src/utils/Backoff.ts @@ -15,10 +15,6 @@ export class Backoff { /** * Schedule function calls with exponential backoff - * - * @param {function} cb - * @param {number} baseMillis - * @param {number} maxMillis */ constructor(cb: (...args: any[]) => any, baseMillis?: number, maxMillis?: number) { this.baseMillis = Backoff.__TEST__BASE_MILLIS || baseMillis || Backoff.DEFAULT_BASE_MILLIS; diff --git a/src/utils/LRUCache/index.ts b/src/utils/LRUCache/index.ts index edf1b59b..2d1a0ec5 100644 --- a/src/utils/LRUCache/index.ts +++ b/src/utils/LRUCache/index.ts @@ -1,14 +1,13 @@ -import { IMap, _Map } from '../lang/maps'; import { LinkedList, Node } from './LinkedList'; export class LRUCache { maxLen: number; - items: IMap>; + items: Map>; lru: LinkedList<{ key: K, value: V }>; constructor(maxSize?: number) { this.maxLen = maxSize || 1; - this.items = new _Map(); + this.items = new Map(); this.lru = new LinkedList(); } diff --git a/src/utils/MinEventEmitter.ts b/src/utils/MinEventEmitter.ts index a081f1e7..750ce7b9 100644 --- a/src/utils/MinEventEmitter.ts +++ b/src/utils/MinEventEmitter.ts @@ -1,5 +1,4 @@ - -import { IEventEmitter } from '../types'; +import SplitIO from '../../types/splitio'; const NEW_LISTENER_EVENT = 'newListener'; const REMOVE_LISTENER_EVENT = 'removeListener'; @@ -11,7 +10,7 @@ function checkListener(listener: unknown) { } // @TODO implement missing methods, check spec and add UTs -export class EventEmitter implements IEventEmitter { +export class EventEmitter implements SplitIO.IEventEmitter { private listeners: Record void, // the event listener diff --git a/src/utils/MinEvents.ts b/src/utils/MinEvents.ts index 71aa3626..51b57c50 100644 --- a/src/utils/MinEvents.ts +++ b/src/utils/MinEvents.ts @@ -30,7 +30,7 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 
SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -import { IEventEmitter } from '../types'; +import SplitIO from '../../types/splitio'; var R = typeof Reflect === 'object' ? Reflect : null; var ReflectApply = R && typeof R.apply === 'function' @@ -39,7 +39,7 @@ var ReflectApply = R && typeof R.apply === 'function' return Function.prototype.apply.call(target, receiver, args); }; -export const EventEmitter: { new(): IEventEmitter } = function EventEmitter() { +export const EventEmitter: { new(): SplitIO.IEventEmitter } = function EventEmitter() { EventEmitter.init.call(this); }; diff --git a/src/utils/Semver.ts b/src/utils/Semver.ts index f94f3fa5..7f11d9ae 100644 --- a/src/utils/Semver.ts +++ b/src/utils/Semver.ts @@ -84,7 +84,7 @@ export class Semver { /** * Precedence comparision between 2 Semver objects. * - * @return `0` if `this === toCompare`, `-1` if `this < toCompare`, and `1` if `this > toCompare` + * @returns `0` if `this === toCompare`, `-1` if `this < toCompare`, and `1` if `this > toCompare` */ public compare(toCompare: Semver): number { if (this.version === toCompare.version) return 0; diff --git a/src/utils/base64/index.ts b/src/utils/base64/index.ts index 32077037..3fa878de 100644 --- a/src/utils/base64/index.ts +++ b/src/utils/base64/index.ts @@ -22,7 +22,7 @@ const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/= /** * Decode a given string value in Base64 format * - * @param {string} input value to decode + * @param input - value to decode */ export function decodeFromBase64(input: string): string { const str = (String(input)).replace(/[=]+$/, ''); // #31: ExtendScript bad parse of /= @@ -48,7 +48,7 @@ export function decodeFromBase64(input: string): string { /** * Encode a given string value to Base64 format. 
* - * @param {string} input value to encode + * @param input - value to encode */ export function encodeToBase64(input: string): string { const str = String(input); diff --git a/src/utils/constants/browser.ts b/src/utils/constants/browser.ts index ec2add2c..d627f780 100644 --- a/src/utils/constants/browser.ts +++ b/src/utils/constants/browser.ts @@ -1,6 +1,2 @@ -// Integration types -export const GOOGLE_ANALYTICS_TO_SPLIT = 'GOOGLE_ANALYTICS_TO_SPLIT'; -export const SPLIT_TO_GOOGLE_ANALYTICS = 'SPLIT_TO_GOOGLE_ANALYTICS'; - // This value might be eventually set via a config parameter export const DEFAULT_CACHE_EXPIRATION_IN_MILLIS = 864000000; // 10 days diff --git a/src/utils/constants/index.ts b/src/utils/constants/index.ts index 77bda4e1..cd11790f 100644 --- a/src/utils/constants/index.ts +++ b/src/utils/constants/index.ts @@ -1,5 +1,4 @@ -import { StorageType } from '../../storages/types'; -import { SDKMode } from '../../types'; +import SplitIO from '../../../types/splitio'; // Special treatments export const CONTROL = 'control'; @@ -22,17 +21,17 @@ export const OPTIMIZED = 'OPTIMIZED'; export const NONE = 'NONE'; // SDK Modes -export const LOCALHOST_MODE: SDKMode = 'localhost'; -export const STANDALONE_MODE: SDKMode = 'standalone'; +export const LOCALHOST_MODE = 'localhost'; +export const STANDALONE_MODE = 'standalone'; export const PRODUCER_MODE = 'producer'; -export const CONSUMER_MODE: SDKMode = 'consumer'; -export const CONSUMER_PARTIAL_MODE: SDKMode = 'consumer_partial'; +export const CONSUMER_MODE = 'consumer'; +export const CONSUMER_PARTIAL_MODE = 'consumer_partial'; // Storage types -export const STORAGE_MEMORY: StorageType = 'MEMORY'; -export const STORAGE_LOCALSTORAGE: StorageType = 'LOCALSTORAGE'; -export const STORAGE_REDIS: StorageType = 'REDIS'; -export const STORAGE_PLUGGABLE: StorageType = 'PLUGGABLE'; +export const STORAGE_MEMORY: SplitIO.StorageType = 'MEMORY'; +export const STORAGE_LOCALSTORAGE: SplitIO.StorageType = 'LOCALSTORAGE'; +export 
const STORAGE_REDIS: SplitIO.StorageType = 'REDIS'; +export const STORAGE_PLUGGABLE: SplitIO.StorageType = 'PLUGGABLE'; // User consent export const CONSENT_GRANTED = 'GRANTED'; // The user has granted consent for tracking events and impressions @@ -75,7 +74,7 @@ export const EVENTS = 'ev'; export const TELEMETRY = 'te'; export const TOKEN = 'to'; export const SEGMENT = 'se'; -export const MY_SEGMENT = 'ms'; +export const MEMBERSHIPS = 'ms'; export const TREATMENT = 't'; export const TREATMENTS = 'ts'; @@ -105,7 +104,8 @@ export const DISABLED = 0; export const ENABLED = 1; export const PAUSED = 2; -export const FLAG_SPEC_VERSION = '1.1'; +export const FLAG_SPEC_VERSION = '1.2'; // Matcher types export const IN_SEGMENT = 'IN_SEGMENT'; +export const IN_LARGE_SEGMENT = 'IN_LARGE_SEGMENT'; diff --git a/src/utils/decompress/index.ts b/src/utils/decompress/index.ts index e0f5c2cd..fdbead14 100644 --- a/src/utils/decompress/index.ts +++ b/src/utils/decompress/index.ts @@ -409,8 +409,8 @@ export const algorithms = (function iifeDecompress() { return { /** * Expands GZIP data - * @param data The data to decompress - * @param out Where to write the data. GZIP already encodes the output size, so providing this doesn't save memory. + * @param data - The data to decompress + * @param out - Where to write the data. GZIP already encodes the output size, so providing this doesn't save memory. * @returns The decompressed version of the data */ gunzipSync(data: Uint8Array, out?: Uint8Array): Uint8Array { @@ -418,8 +418,8 @@ export const algorithms = (function iifeDecompress() { }, /** * Expands Zlib data - * @param data The data to decompress - * @param out Where to write the data. Saves memory if you know the decompressed size and provide an output buffer of that length. + * @param data - The data to decompress + * @param out - Where to write the data. Saves memory if you know the decompressed size and provide an output buffer of that length. 
* @returns The decompressed version of the data */ unzlibSync(data: Uint8Array, out?: Uint8Array): Uint8Array { diff --git a/src/utils/inputValidation/attributes.ts b/src/utils/inputValidation/attributes.ts index e9824113..a84b7e47 100644 --- a/src/utils/inputValidation/attributes.ts +++ b/src/utils/inputValidation/attributes.ts @@ -1,5 +1,5 @@ import { isObject } from '../lang'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { validateAttribute } from './attribute'; import { ERROR_NOT_PLAIN_OBJECT } from '../../logger/constants'; diff --git a/src/utils/inputValidation/eventProperties.ts b/src/utils/inputValidation/eventProperties.ts index 1fb2984e..310946cc 100644 --- a/src/utils/inputValidation/eventProperties.ts +++ b/src/utils/inputValidation/eventProperties.ts @@ -1,6 +1,6 @@ import { isObject, isString, isFiniteNumber, isBoolean } from '../lang'; import { objectAssign } from '../lang/objectAssign'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { ERROR_NOT_PLAIN_OBJECT, ERROR_SIZE_EXCEEDED, WARN_SETTING_NULL, WARN_TRIMMING_PROPERTIES } from '../../logger/constants'; diff --git a/src/utils/inputValidation/key.ts b/src/utils/inputValidation/key.ts index 9068bed6..3a612a59 100644 --- a/src/utils/inputValidation/key.ts +++ b/src/utils/inputValidation/key.ts @@ -1,5 +1,5 @@ import { isObject, isString, isFiniteNumber, toString } from '../lang'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { ERROR_NULL, WARN_CONVERTING, ERROR_EMPTY, ERROR_TOO_LONG, ERROR_INVALID, ERROR_INVALID_KEY_OBJECT } from '../../logger/constants'; diff --git a/src/utils/inputValidation/preloadedData.ts b/src/utils/inputValidation/preloadedData.ts index 10531580..f07ee432 100644 --- 
a/src/utils/inputValidation/preloadedData.ts +++ b/src/utils/inputValidation/preloadedData.ts @@ -1,6 +1,5 @@ import { isObject, isString, isFiniteNumber } from '../lang'; import { validateSplit } from './split'; -import { SplitIO } from '../../types'; import { ILogger } from '../../logger/types'; function validateTimestampData(log: ILogger, maybeTimestamp: any, method: string, item: string) { @@ -42,7 +41,7 @@ function validateSegmentsData(log: ILogger, maybeSegmentsData: any, method: stri return false; } -export function validatePreloadedData(log: ILogger, maybePreloadedData: any, method: string): maybePreloadedData is SplitIO.PreloadedData { +export function validatePreloadedData(log: ILogger, maybePreloadedData: any, method: string) { if (!isObject(maybePreloadedData)) { log.error(`${method}: preloadedData must be an object.`); } else if ( diff --git a/src/utils/inputValidation/trafficTypeExistence.ts b/src/utils/inputValidation/trafficTypeExistence.ts index 9619f197..8040f849 100644 --- a/src/utils/inputValidation/trafficTypeExistence.ts +++ b/src/utils/inputValidation/trafficTypeExistence.ts @@ -2,7 +2,7 @@ import { thenable } from '../promise/thenable'; import { LOCALHOST_MODE } from '../constants'; import { ISplitsCacheBase } from '../../storages/types'; import { IReadinessManager } from '../../readiness/types'; -import { SDKMode } from '../../types'; +import SplitIO from '../../../types/splitio'; import { MaybeThenable } from '../../dtos/types'; import { ILogger } from '../../logger/types'; import { WARN_NOT_EXISTENT_TT } from '../../logger/constants'; @@ -14,7 +14,7 @@ function logTTExistenceWarning(log: ILogger, maybeTT: string, method: string) { /** * Separated from the previous method since on some cases it'll be async. 
*/ -export function validateTrafficTypeExistence(log: ILogger, readinessManager: IReadinessManager, splitsCache: ISplitsCacheBase, mode: SDKMode, maybeTT: string, method: string): MaybeThenable { +export function validateTrafficTypeExistence(log: ILogger, readinessManager: IReadinessManager, splitsCache: ISplitsCacheBase, mode: SplitIO.SDKMode, maybeTT: string, method: string): MaybeThenable { // If not ready or in localhost mode, we won't run the validation if (!readinessManager.isReady() || mode === LOCALHOST_MODE) return true; diff --git a/src/utils/key/index.ts b/src/utils/key/index.ts index aa252beb..fc763b6e 100644 --- a/src/utils/key/index.ts +++ b/src/utils/key/index.ts @@ -1,4 +1,4 @@ -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { isObject } from '../lang'; // function isSplitKeyObject(key: any): key is SplitIO.SplitKeyObject { diff --git a/src/utils/lang/__tests__/maps.spec.ts b/src/utils/lang/__tests__/maps.spec.ts deleted file mode 100644 index 02312d50..00000000 --- a/src/utils/lang/__tests__/maps.spec.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { __getMapConstructor, MapPoly } from '../maps'; - -test('__getMapConstructor', () => { - - // should return global Map constructor if available - expect(__getMapConstructor()).toBe(global.Map); - - const originalMap = global.Map; // @ts-ignore - global.Map = undefined; // overwrite global Map - - // should return Map polyfill if global Map constructor is not available - expect(__getMapConstructor()).toBe(MapPoly); - - global.Map = originalMap; // restore original global Map - -}); diff --git a/src/utils/lang/__tests__/sets.spec.ts b/src/utils/lang/__tests__/sets.spec.ts index 1cb99853..8be359eb 100644 --- a/src/utils/lang/__tests__/sets.spec.ts +++ b/src/utils/lang/__tests__/sets.spec.ts @@ -1,29 +1,26 @@ -import { __getSetConstructor, _Set, returnSetsUnion, SetPoly } from '../sets'; - -test('__getSetConstructor', () => { - - // should return global Set 
constructor if available - expect(__getSetConstructor()).toBe(global.Set); - - const originalSet = global.Set; // @ts-ignore - global.Set = undefined; // overwrite global Set - - // should return Set polyfill if global Set constructor is not available - expect(__getSetConstructor()).toBe(SetPoly); - - global.Set = originalSet; // restore original global Set - -}); +import { returnSetsUnion, returnDifference } from '../sets'; test('returnSetsUnion', () => { - const set = new _Set(['1','2','3']); - const set2 = new _Set(['4','5','6']); - expect(returnSetsUnion(set, set2)).toEqual(new _Set(['1','2','3','4','5','6'])); - expect(set).toEqual(new _Set(['1','2','3'])); - expect(set2).toEqual(new _Set(['4','5','6'])); + const set = new Set(['1', '2', '3', '4']); + const set2 = new Set(['4', '5', '6', '1']); + expect(returnSetsUnion(set, set2)).toEqual(new Set(['1', '2', '3', '4', '5', '6'])); + expect(set).toEqual(new Set(['1', '2', '3', '4'])); + expect(set2).toEqual(new Set(['4', '5', '6', '1'])); - const emptySet = new _Set([]); + const emptySet = new Set([]); expect(returnSetsUnion(emptySet, emptySet)).toEqual(emptySet); expect(returnSetsUnion(set, emptySet)).toEqual(set); expect(returnSetsUnion(emptySet, set2)).toEqual(set2); }); + +test('returnDifference', () => { + const list = ['1', '2', '3']; + const list2 = ['2', '3', '4']; + expect(returnDifference(list, list2)).toEqual(['1']); + expect(list).toEqual(['1', '2', '3']); + expect(list2).toEqual(['2', '3', '4']); + + expect(returnDifference([], [])).toEqual([]); + expect(returnDifference(list, [])).toEqual(list); + expect(returnDifference([], list2)).toEqual([]); +}); diff --git a/src/utils/lang/binarySearch.ts b/src/utils/lang/binarySearch.ts index 7ff83fa0..2c38e0da 100644 --- a/src/utils/lang/binarySearch.ts +++ b/src/utils/lang/binarySearch.ts @@ -1,8 +1,8 @@ /** * Searches the index of the specified `value` inside an ordered array of `items` using the binary search algorithm. 
* - * @param items the array to be searched - * @param value the value to be searched for + * @param items - the array to be searched + * @param value - the value to be searched for * @returns integer number between 0 and `items.length`. This value is the index of the search value, * if it is contained in the array, or the index at which the value should be inserted to keep the array ordered. */ diff --git a/src/utils/lang/index.ts b/src/utils/lang/index.ts index 0a828dda..11b6afd0 100644 --- a/src/utils/lang/index.ts +++ b/src/utils/lang/index.ts @@ -122,10 +122,9 @@ export function isBoolean(val: any): boolean { */ export function isFiniteNumber(val: any): boolean { if (val instanceof Number) val = val.valueOf(); - // @TODO remove `isFinite` once `Number.isFinite` is fully supported by targets - // eslint-disable-next-line compat/compat - if (typeof val === 'number') return Number.isFinite ? Number.isFinite(val) : isFinite(val); - return false; + return typeof val === 'number' ? + Number.isFinite ? Number.isFinite(val) : isFinite(val) : + false; } /** @@ -134,9 +133,9 @@ export function isFiniteNumber(val: any): boolean { */ export function isIntegerNumber(val: any): boolean { if (val instanceof Number) val = val.valueOf(); - // eslint-disable-next-line compat/compat - if (typeof val === 'number') return Number.isInteger ? Number.isInteger(val) : isFinite(val) && Math.floor(val) === val; - return false; + return typeof val === 'number' ? + Number.isInteger ? Number.isInteger(val) : isFinite(val) && Math.floor(val) === val : + false; } /** diff --git a/src/utils/lang/maps.ts b/src/utils/lang/maps.ts deleted file mode 100644 index 277712bd..00000000 --- a/src/utils/lang/maps.ts +++ /dev/null @@ -1,108 +0,0 @@ -/** - * Map implementation based on es6-map polyfill (https://github.com/medikoo/es6-map/blob/master/polyfill.js), - * with the minimal features used by the SDK. 
- -Copyright (C) 2013 Mariusz Nowak (www.medikoo.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
-**/ - -export interface IMap { - clear(): void; - delete(key: K): boolean; - forEach(callbackfn: (value: V, key: K, map: Map) => void, thisArg?: any): void; - get(key: K): V | undefined; - has(key: K): boolean; - set(key: K, value: V): this; - readonly size: number; -} - -export class MapPoly implements IMap{ - private __mapKeysData__: K[] = []; - private __mapValuesData__: V[] = []; - - // unlike ES6 `Map`, it only accepts an array as first argument iterable - constructor(entries?: readonly (readonly [K, V])[] | null) { - if (Array.isArray(entries)) entries.forEach(entry => { this.set(entry[0], entry[1]); }); - } - - clear() { - if (!this.__mapKeysData__.length) return; - this.__mapKeysData__.length = 0; - this.__mapValuesData__.length = 0; - } - - delete(key: K) { - const index = this.__mapKeysData__.indexOf(key); - if (index === -1) return false; - this.__mapKeysData__.splice(index, 1); - this.__mapValuesData__.splice(index, 1); - return true; - } - - forEach(callbackfn: (value: V, key: K, map: Map) => void, thisArg?: any) { - for (let i = 0; i < this.__mapKeysData__.length; i++) { - callbackfn.call(thisArg, this.__mapValuesData__[i], this.__mapKeysData__[i], this as any); - } - } - - get(key: K) { - const index = this.__mapKeysData__.indexOf(key); - if (index === -1) return; - return this.__mapValuesData__[index]; - } - - has(key: K): boolean { - return this.__mapKeysData__.indexOf(key) !== -1; - } - - set(key: K, value: V) { - let index = this.__mapKeysData__.indexOf(key); - if (index === -1) index = this.__mapKeysData__.push(key) - 1; - this.__mapValuesData__[index] = value; - return this; - } - - get size() { - return this.__mapKeysData__.length; - } - -} - -interface IMapConstructor { - new(): IMap; - new (entries?: readonly (readonly [K, V])[] | null): IMap; - readonly prototype: IMap; -} - -/** - * return the Map constructor to use. 
If native Map is not available or it doesn't support the required features (e.g., IE11), - * a ponyfill with minimal features is returned instead. - * - * Exported for testing purposes only. - */ -export function __getMapConstructor(): IMapConstructor { - // eslint-disable-next-line compat/compat - if (typeof Array.from === 'function' && typeof Map === 'function' && Map.prototype && Map.prototype.values) { - return Map; - } - return MapPoly; -} - -export const _Map = __getMapConstructor(); diff --git a/src/utils/lang/objectAssign.ts b/src/utils/lang/objectAssign.ts index 23fcbd9a..1253b355 100644 --- a/src/utils/lang/objectAssign.ts +++ b/src/utils/lang/objectAssign.ts @@ -1,71 +1,6 @@ -/* -Adaptation of "object-assign" library (https://www.npmjs.com/package/object-assign) -exported as an ES module instead of CommonJS, to avoid extra configuration steps when using -the ESM build of the SDK with tools that doesn't support CommonJS by default (e.g. Rollup). - -object-assign -(c) Sindre Sorhus -@license MIT -*/ - -/* eslint-disable */ -// @ts-nocheck - -var getOwnPropertySymbols = Object.getOwnPropertySymbols; -var hasOwnProperty = Object.prototype.hasOwnProperty; -var propIsEnumerable = Object.prototype.propertyIsEnumerable; - -function toObject(val) { - if (val === null || val === undefined) { - throw new TypeError('Object.assign cannot be called with null or undefined'); - } - - return Object(val); -} - -function shouldUseNative() { - try { - if (!Object.assign) { - return false; - } - - // Detect buggy property enumeration order in older V8 versions. 
- - // https://bugs.chromium.org/p/v8/issues/detail?id=4118 - var test1 = new String('abc'); - test1[5] = 'de'; - if (Object.getOwnPropertyNames(test1)[0] === '5') { - return false; - } - - // https://bugs.chromium.org/p/v8/issues/detail?id=3056 - var test2 = {}; - for (var i = 0; i < 10; i++) { - test2['_' + String.fromCharCode(i)] = i; - } - var order2 = Object.getOwnPropertyNames(test2).map(function (n) { - return test2[n]; - }); - if (order2.join('') !== '0123456789') { - return false; - } - - // https://bugs.chromium.org/p/v8/issues/detail?id=3056 - var test3 = {}; - 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { - test3[letter] = letter; - }); - if (Object.keys(Object.assign({}, test3)).join('') !== - 'abcdefghijklmnopqrst') { - return false; - } - - return true; - } catch (err) { - // We don't expect any of the above to throw, but better to be safe. - return false; - } -} +/** + * A tiny polyfill for Object.assign + */ // https://www.npmjs.com/package/@types/object-assign type ObjectAssign = ((target: T, source: U) => T & U) & @@ -74,31 +9,17 @@ type ObjectAssign = ((target: T, source: U) => T & U) & ((target: T, source1: U, source2: V, source3: W, source4: Q) => T & U & V & W & Q) & ((target: T, source1: U, source2: V, source3: W, source4: Q, source5: R) => T & U & V & W & Q & R) & ((target: any, ...sources: any[]) => any); - -export const objectAssign: ObjectAssign = shouldUseNative() ? 
Object.assign : function (target, source) { - var from; - var to = toObject(target); - var symbols; - - for (var s = 1; s < arguments.length; s++) { - from = Object(arguments[s]); - - // eslint-disable-next-line no-restricted-syntax - for (var key in from) { - if (hasOwnProperty.call(from, key)) { - to[key] = from[key]; - } - } - - if (getOwnPropertySymbols) { - symbols = getOwnPropertySymbols(from); - for (var i = 0; i < symbols.length; i++) { - if (propIsEnumerable.call(from, symbols[i])) { - to[symbols[i]] = from[symbols[i]]; - } +export const objectAssign: ObjectAssign = Object.assign || function (target: any) { + if (target === null || target === undefined) throw new TypeError('Object.assign cannot be called with null or undefined'); + target = Object(target); + + for (let i = 1; i < arguments.length; i++) { + const source = Object(arguments[i]); // eslint-disable-next-line no-restricted-syntax + for (const key in source) { + if (Object.prototype.hasOwnProperty.call(source, key)) { + target[key] = source[key]; } } } - - return to; + return target; }; diff --git a/src/utils/lang/sets.ts b/src/utils/lang/sets.ts index d8d63e7a..5015a0fc 100644 --- a/src/utils/lang/sets.ts +++ b/src/utils/lang/sets.ts @@ -1,129 +1,17 @@ -/** - * Set implementation based on es6-set polyfill (https://github.com/medikoo/es6-set/blob/master/polyfill.js), - * with the minimal features used by the SDK. 
+export function setToArray(set: Set): T[] { + if (Array.from) return Array.from(set); -Copyright (C) 2013 Mariusz Nowak (www.medikoo.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
-**/ - -export interface ISet { - add(value: T): this; - clear(): void; - delete(value: T): boolean; - forEach(callbackfn: (value: T, value2: T, set: ISet) => void, thisArg?: any): void; - has(value: T): boolean; - readonly size: number; -} - -export class SetPoly implements ISet{ - __setData__: T[] = []; - - // unlike ES6 `Set`, it only accepts an array as first argument iterable - constructor(values?: readonly T[] | null) { - if (Array.isArray(values)) values.forEach(value => { this.add(value); }); - } - - clear() { - if (!this.__setData__.length) return; - this.__setData__.length = 0; - } - - add(value: T) { - if (this.has(value)) return this; - this.__setData__.push(value); - return this; - } - - delete(value: T) { - let index = this.__setData__.indexOf(value); - if (index === -1) return false; - this.__setData__.splice(index, 1); - return true; - } - - has(value: T) { - return this.__setData__.indexOf(value) !== -1; - } - - forEach(callbackfn: (value: T, value2: T, set: SetPoly) => void, thisArg?: any): void { - if (typeof callbackfn !== 'function') throw new TypeError(callbackfn + ' is not a function'); - - for (let i = 0; i < this.__setData__.length; i++) { - const value = this.__setData__[i]; - callbackfn.call(thisArg, value, value, this); - } - } - - get size() { - return this.__setData__.length; - } - -} - - -/** - * return an array containing the items of the given set. - * @param set Set or SetPoly instance - */ -export function setToArray(set: ISet): T[] { - if (set instanceof SetPoly) { - return set.__setData__.slice(); - } - // if not using SetPoly as set, it means Array.from is supported - // eslint-disable-next-line compat/compat - return Array.from(set as Set); -} - -interface ISetConstructor { - new (values?: readonly T[] | null): ISet; - readonly prototype: ISet; -} - -/** - * return the Set constructor to use. 
If `Array.from` built-in or native Set is not available or it doesn't support the required features, - * a ponyfill with minimal features is returned instead. - * - * Exported for testing purposes only. - */ -export function __getSetConstructor(): ISetConstructor { - // eslint-disable-next-line compat/compat - if (typeof Array.from === 'function' && typeof Set === 'function' && Set.prototype && Set.prototype.values) { - return Set; - } - return SetPoly; + const array: T[] = []; + set.forEach((value: T) => { + array.push(value); + }); + return array; } -export const _Set = __getSetConstructor(); - -export function returnSetsUnion(set: ISet, set2: ISet): ISet { - const result = new _Set(setToArray(set)); - set2.forEach(value => { - result.add(value); - }); - return result; +export function returnSetsUnion(set: Set, set2: Set): Set { + return new Set(setToArray(set).concat(setToArray(set2))); } export function returnDifference(list: T[] = [], list2: T[] = []): T[] { - const result = new _Set(list); - list2.forEach(item => { - result.delete(item); - }); - return setToArray(result); + return list.filter(item => list2.indexOf(item) === -1); } diff --git a/src/utils/murmur3/murmur3_128.ts b/src/utils/murmur3/murmur3_128.ts index a9929324..954ba591 100644 --- a/src/utils/murmur3/murmur3_128.ts +++ b/src/utils/murmur3/murmur3_128.ts @@ -266,7 +266,7 @@ function hash128x64(key?: string, seed?: number) { /** * x64 version of Murmur3 for 128bits. * - * @param {string} str + * @param str - The string to hash. */ export function hash128(str: string, seed?: number): string { return hash128x64(UTF16ToUTF8(str), (seed as number) >>> 0); diff --git a/src/utils/murmur3/murmur3_128_x86.ts b/src/utils/murmur3/murmur3_128_x86.ts index a05ad3dc..a76d3374 100644 --- a/src/utils/murmur3/murmur3_128_x86.ts +++ b/src/utils/murmur3/murmur3_128_x86.ts @@ -181,7 +181,7 @@ function hash128x86(key?: string, seed?: number): string { * x86 version of Murmur3 for 128bits. 
* Used by hashImpression128 because in JS it is more efficient than the x64 version, no matter the underlying OS/CPU arch. * - * @param {string} str + * @param str - The string to hash. */ export function hash128(str: string, seed?: number): string { return hash128x86(UTF16ToUTF8(str), (seed as number) >>> 0); diff --git a/src/utils/murmur3/murmur3_64.ts b/src/utils/murmur3/murmur3_64.ts index aba873c7..92fdc76b 100644 --- a/src/utils/murmur3/murmur3_64.ts +++ b/src/utils/murmur3/murmur3_64.ts @@ -25,7 +25,7 @@ function hex2dec(s: string): string { /** * Gets the higher 64 bits of the x64 version of Murmur3 for 128bits, as decimal and hexadecimal number strings. * Used for MySegments channel V2 notifications. - * @param {string} str + * @param str - The string to hash */ export function hash64(str: string): Hash64 { const hex = hash128(str).slice(0, 16); diff --git a/src/utils/murmur3/utfx.ts b/src/utils/murmur3/utfx.ts index fd9125f4..c53322e3 100644 --- a/src/utils/murmur3/utfx.ts +++ b/src/utils/murmur3/utfx.ts @@ -14,10 +14,9 @@ export interface utfx { /** * Encodes UTF8 code points to UTF8 bytes. - * @param {(!function():number|null) | number} src Code points source, either as a function returning the next code point + * @param src - Code points source, either as a function returning the next code point * respectively `null` if there are no more code points left or a single numeric code point. - * @param {!function(number)} dst Bytes destination as a function successively called with the next byte - * @expose + * @param dst - Bytes destination as a function successively called with the next byte */ function encodeUTF8(src: (() => number | null) | number, dst: (cp: number) => void): void { var cp = null; @@ -38,11 +37,10 @@ function encodeUTF8(src: (() => number | null) | number, dst: (cp: number) => vo /** * Converts UTF16 characters to UTF8 code points. 
- * @param {!function():number|null} src Characters source as a function returning the next char code respectively + * @param src - Characters source as a function returning the next char code respectively * `null` if there are no more characters left. - * @param {!function(number)} dst Code points destination as a function successively called with each converted code + * @param dst - Code points destination as a function successively called with each converted code * point. - * @expose */ function UTF16toUTF8(src: () => number | null, dst: (cp: number) => void): void { var c1, c2 = null; @@ -65,10 +63,9 @@ function UTF16toUTF8(src: () => number | null, dst: (cp: number) => void): void /** * Converts and encodes UTF16 characters to UTF8 bytes. - * @param {!function():number|null} src Characters source as a function returning the next char code respectively `null` + * @param src - Characters source as a function returning the next char code respectively `null` * if there are no more characters left. - * @param {!function(number)} dst Bytes destination as a function successively called with the next byte. - * @expose + * @param dst - Bytes destination as a function successively called with the next byte. */ export function encodeUTF16toUTF8(src: () => number | null, dst: (...args: number[]) => string | undefined): void { UTF16toUTF8(src, function (cp) { @@ -78,18 +75,15 @@ export function encodeUTF16toUTF8(src: () => number | null, dst: (...args: numbe /** * String.fromCharCode reference for compile time renaming. - * @type {!function(...[number]):string} - * @inner */ var stringFromCharCode = String.fromCharCode; /** * Creates a source function for a string. - * @param {string} s String to read from - * @returns {!function():number|null} Source function returning the next char code respectively `null` if there are + * @param s - String to read from + * @returns Source function returning the next char code respectively `null` if there are * no more characters left. 
- * @throws {TypeError} If the argument is invalid - * @expose + * @throws If the argument is invalid */ export function stringSource(s: string): () => number | null { if (typeof s !== 'string') @@ -101,9 +95,8 @@ export function stringSource(s: string): () => number | null { /** * Creates a destination function for a string. - * @returns {function(number=):undefined|string} Destination function successively called with the next char code. + * @returns Destination function successively called with the next char code. * Returns the final string when called without arguments. - * @expose */ export function stringDestination(): (...args: number[]) => string | undefined { const cs: number[] = [], ps: string[] = []; return function () { diff --git a/src/utils/promise/wrapper.ts b/src/utils/promise/wrapper.ts index d0100fd6..62266457 100644 --- a/src/utils/promise/wrapper.ts +++ b/src/utils/promise/wrapper.ts @@ -11,8 +11,8 @@ * - If the wrapped promise is rejected when using native async/await syntax, the `defaultOnRejected` handler is invoked * and neither the catch block nor the remaining try block are executed. * - * @param customPromise promise to wrap - * @param defaultOnRejected default onRejected function + * @param customPromise - promise to wrap + * @param defaultOnRejected - default onRejected function * @returns a promise that doesn't need to be handled for rejection (except when using async/await syntax) and * includes a method named `hasOnFulfilled` that returns true if the promise has attached an onFulfilled handler. 
*/ diff --git a/src/utils/settingsValidation/__tests__/index.spec.ts b/src/utils/settingsValidation/__tests__/index.spec.ts index 8e0238c4..a230fdf3 100644 --- a/src/utils/settingsValidation/__tests__/index.spec.ts +++ b/src/utils/settingsValidation/__tests__/index.spec.ts @@ -233,40 +233,32 @@ describe('settingsValidation', () => { expect(settings.core.key).toBe(undefined); }); - test('validates and sanitizes key and traffic type in client-side', () => { - const clientSideValidationParams = { ...minimalSettingsParams, acceptKey: true, acceptTT: true }; + test('validates and sanitizes key in client-side', () => { + const clientSideValidationParams = { ...minimalSettingsParams, acceptKey: true }; const samples = [{ key: ' valid-key ', settingsKey: 'valid-key', // key string is trimmed - trafficType: 'VALID-TT', settingsTrafficType: 'valid-tt', // TT is converted to lowercase }, { key: undefined, settingsKey: false, // undefined key is not valid in client-side - trafficType: undefined, settingsTrafficType: undefined, }, { - key: null, settingsKey: false, - trafficType: null, settingsTrafficType: false, + key: {}, settingsKey: false, }, { key: true, settingsKey: false, - trafficType: true, settingsTrafficType: false, }, { key: 1.5, settingsKey: '1.5', // finite number as key is parsed - trafficType: 100, settingsTrafficType: false, }, { key: { matchingKey: 100, bucketingKey: ' BUCK ' }, settingsKey: { matchingKey: '100', bucketingKey: 'BUCK' }, - trafficType: {}, settingsTrafficType: false, }]; - samples.forEach(({ key, trafficType, settingsKey, settingsTrafficType }) => { + samples.forEach(({ key, settingsKey }) => { const settings = settingsValidation({ core: { authorizationKey: 'dummy token', - key, - trafficType + key } }, clientSideValidationParams); expect(settings.core.key).toEqual(settingsKey); - expect(settings.core.trafficType).toEqual(settingsTrafficType); }); }); @@ -275,12 +267,11 @@ describe('settingsValidation', () => { core: { authorizationKey: 'dummy 
token', key: true, - trafficType: true + trafficType: 'ignored' } }, { ...minimalSettingsParams, acceptKey: true }); expect(settings.core.key).toEqual(false); // key is validated - expect(settings.core.trafficType).toEqual(true); // traffic type is ignored }); // Not implemented yet @@ -309,13 +300,13 @@ test('SETTINGS / urls should be correctly assigned', () => { const baseEventsUrl = 'https://events.split.io/api'; [ - '/mySegments/nico', - '/mySegments/events@split', - '/mySegments/metrics@split', - '/mySegments/testImpressions@split', - '/mySegments/testImpressions', - '/mySegments/events', - '/mySegments/metrics', + '/memberships/nico', + '/memberships/events@split', + '/memberships/metrics@split', + '/memberships/testImpressions@split', + '/memberships/testImpressions', + '/memberships/events', + '/memberships/metrics', '/splitChanges?since=-1', '/splitChanges?since=100', '/segmentChanges/segment1?since=100', diff --git a/src/utils/settingsValidation/__tests__/settings.mocks.ts b/src/utils/settingsValidation/__tests__/settings.mocks.ts index a3057515..a2a3fb14 100644 --- a/src/utils/settingsValidation/__tests__/settings.mocks.ts +++ b/src/utils/settingsValidation/__tests__/settings.mocks.ts @@ -1,7 +1,6 @@ import { InMemoryStorageCSFactory } from '../../../storages/inMemory/InMemoryStorageCS'; import { ISettings } from '../../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { LocalhostFromObject } from '../../../sync/offline/LocalhostFromObject'; export const settingsWithKey = { core: { @@ -13,17 +12,6 @@ export const settingsWithKey = { log: loggerMock }; -export const settingsWithKeyAndTT = { - core: { - key: 'some_key', - trafficType: 'some_tt' - }, - startup: { - readyTimeout: 1, - }, - log: loggerMock -}; - export const settingsWithKeyObject = { core: { key: { @@ -73,14 +61,13 @@ export const fullSettings: ISettings = { sync: { splitFilters: [], impressionsMode: 'OPTIMIZED', - localhostMode: LocalhostFromObject(), 
__splitFiltersValidation: { validFilters: [], queryString: null, groupedFilters: { bySet: [], byName: [], byPrefix: [] }, }, enabled: true, - flagSpecVersion: '1.1' + flagSpecVersion: '1.2' }, version: 'jest', runtime: { diff --git a/src/utils/settingsValidation/consent.ts b/src/utils/settingsValidation/consent.ts index 98b4112b..8c706b75 100644 --- a/src/utils/settingsValidation/consent.ts +++ b/src/utils/settingsValidation/consent.ts @@ -1,12 +1,12 @@ import { ERROR_INVALID_CONFIG_PARAM } from '../../logger/constants'; import { ILogger } from '../../logger/types'; -import { ConsentStatus } from '../../types'; +import SplitIO from '../../../types/splitio'; import { CONSENT_DECLINED, CONSENT_GRANTED, CONSENT_UNKNOWN } from '../constants'; import { stringToUpperCase } from '../lang'; const userConsentValues = [CONSENT_DECLINED, CONSENT_GRANTED, CONSENT_UNKNOWN]; -export function validateConsent({ userConsent, log }: { userConsent?: any, log: ILogger }): ConsentStatus { +export function validateConsent({ userConsent, log }: { userConsent?: any, log: ILogger }): SplitIO.ConsentStatus { userConsent = stringToUpperCase(userConsent); if (userConsentValues.indexOf(userConsent) > -1) return userConsent; diff --git a/src/utils/settingsValidation/impressionsMode.ts b/src/utils/settingsValidation/impressionsMode.ts index 98e23d95..a273161a 100644 --- a/src/utils/settingsValidation/impressionsMode.ts +++ b/src/utils/settingsValidation/impressionsMode.ts @@ -1,6 +1,6 @@ import { ERROR_INVALID_CONFIG_PARAM } from '../../logger/constants'; import { ILogger } from '../../logger/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { DEBUG, OPTIMIZED, NONE } from '../constants'; import { stringToUpperCase } from '../lang'; diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index 9cc1c3f4..ce1d1772 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ 
-6,7 +6,6 @@ import { validImpressionsMode } from './impressionsMode'; import { ISettingsValidationParams } from './types'; import { ISettings } from '../../types'; import { validateKey } from '../inputValidation/key'; -import { validateTrafficType } from '../inputValidation/trafficType'; import { ERROR_MIN_CONFIG_PARAM, LOG_PREFIX_CLIENT_INSTANTIATION } from '../../logger/constants'; // Exported for telemetry @@ -19,8 +18,6 @@ export const base = { authorizationKey: undefined, // key used in your system (only required for browser version) key: undefined, - // traffic type for the given key (only used on browser version) - trafficType: undefined, // toggle impressions tracking of labels labelsEnabled: true, // toggle sendind (true) or not sending (false) IP and Host Name with impressions, events, and telemetries requests (only used on nodejs version) @@ -83,7 +80,6 @@ export const base = { splitFilters: undefined, // impressions collection mode impressionsMode: OPTIMIZED, - localhostMode: undefined, enabled: true, flagSpecVersion: FLAG_SPEC_VERSION }, @@ -100,12 +96,12 @@ function fromSecondsToMillis(n: number) { * Validates the given config and use it to build a settings object. * NOTE: it doesn't validate the SDK Key. Call `validateApiKey` or `validateAndTrackApiKey` for that after settings validation. 
* - * @param config user defined configuration - * @param validationParams defaults and fields validators used to validate and creates a settings object from a given config + * @param config - user defined configuration + * @param validationParams - defaults and fields validators used to validate and creates a settings object from a given config */ export function settingsValidation(config: unknown, validationParams: ISettingsValidationParams) { - const { defaults, runtime, storage, integrations, logger, localhost, consent, flagSpec } = validationParams; + const { defaults, runtime, storage, integrations, logger, consent, flagSpec } = validationParams; // creates a settings object merging base, defaults and config objects. const withDefaults = merge({}, base, defaults, config) as ISettings; @@ -168,13 +164,6 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV // @ts-ignore, @TODO handle invalid keys as a non-recoverable error? withDefaults.core.key = validateKey(log, maybeKey, LOG_PREFIX_CLIENT_INSTANTIATION); } - - if (validationParams.acceptTT) { - const maybeTT = withDefaults.core.trafficType; - if (maybeTT !== undefined) { // @ts-ignore - withDefaults.core.trafficType = validateTrafficType(log, maybeTT, LOG_PREFIX_CLIENT_INSTANTIATION); - } - } } else { // On server-side, key is undefined and used to distinguish from client-side if (maybeKey !== undefined) log.warn('Provided `key` is ignored in server-side SDK.'); // @ts-ignore @@ -190,8 +179,6 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV // @ts-ignore, modify readonly prop if (integrations) withDefaults.integrations = integrations(withDefaults); - if (localhost) sync.localhostMode = localhost(withDefaults); - // validate push options if (withDefaults.streamingEnabled !== false) { // @ts-ignore, modify readonly prop withDefaults.streamingEnabled = true; @@ -209,11 +196,12 @@ export function settingsValidation(config: unknown, 
validationParams: ISettingsV const splitFiltersValidation = validateSplitFilters(log, sync.splitFilters, withDefaults.mode); sync.splitFilters = splitFiltersValidation.validFilters; sync.__splitFiltersValidation = splitFiltersValidation; - sync.flagSpecVersion = flagSpec ? flagSpec(withDefaults) : FLAG_SPEC_VERSION; + // ensure a valid flag spec version + sync.flagSpecVersion = flagSpec ? flagSpec(withDefaults) : FLAG_SPEC_VERSION; // ensure a valid user consent value // @ts-ignore, modify readonly prop - withDefaults.userConsent = consent(withDefaults); + withDefaults.userConsent = consent ? consent(withDefaults) : undefined; return withDefaults; } diff --git a/src/utils/settingsValidation/integrations/__tests__/plugable.spec.ts b/src/utils/settingsValidation/integrations/__tests__/plugable.spec.ts index a9b76aa1..3de62641 100644 --- a/src/utils/settingsValidation/integrations/__tests__/plugable.spec.ts +++ b/src/utils/settingsValidation/integrations/__tests__/plugable.spec.ts @@ -16,8 +16,8 @@ describe('integrations validator for pluggable integrations', () => { }); test('Filters invalid integration factories from `integrations` array', () => { - const validNoopIntFactory = () => { }; // no-op integration, such as GoogleAnalyticsToSplit - const validIntFactory = () => { return { queue() { } }; }; // integration with queue handler, such as SplitToGoogleAnalytics + const validNoopIntFactory = () => { }; // integration with no queue handler, such as 3rdPartyAnalyticsToSplit + const validIntFactory = () => { return { queue() { } }; }; // integration with queue handler, such as SplitTo3rdPartyAnalytics const invalid = { queue() { } }; // Integration factories that are invalid objects are removed diff --git a/src/utils/settingsValidation/integrations/common.ts b/src/utils/settingsValidation/integrations/common.ts index 5ead4509..2ecb24b6 100644 --- a/src/utils/settingsValidation/integrations/common.ts +++ b/src/utils/settingsValidation/integrations/common.ts @@ -4,11 
+4,11 @@ import { ILogger } from '../../../logger/types'; /** * This function validates `settings.integrations` object * - * @param {any} settings config object provided by the user to initialize the sdk - * @param {function} integrationValidator filter used to remove invalid integration items - * @param {string} extraWarning optional string used to better describe why an item might be invalid + * @param settings - config object provided by the user to initialize the sdk + * @param integrationValidator - filter used to remove invalid integration items + * @param extraWarning - optional string used to better describe why an item might be invalid * - * @returns {Array} array of valid integration items. The array might be empty if `settings` object does not have valid integrations. + * @returns array of valid integration items. The array might be empty if `settings` object does not have valid integrations. */ export function validateIntegrations(settings: { log: ILogger, integrations?: any }, integrationValidator: (integrationItem: any) => boolean, extraWarning?: string) { const { integrations, log } = settings; diff --git a/src/utils/settingsValidation/integrations/configurable.ts b/src/utils/settingsValidation/integrations/configurable.ts index fb24e35b..17283d3b 100644 --- a/src/utils/settingsValidation/integrations/configurable.ts +++ b/src/utils/settingsValidation/integrations/configurable.ts @@ -5,10 +5,10 @@ import { ILogger } from '../../../logger/types'; /** * This function validates `settings.integrations` object that consists of a list of configuration items, used by the isomorphic JS SDK. 
* - * @param {any} settings config object provided by the user to initialize the sdk - * @param {Array} validIntegrationTypes list of integration types to filter from `settings.integrations` + * @param settings - config object provided by the user to initialize the sdk + * @param validIntegrationTypes - list of integration types to filter from `settings.integrations` * - * @returns {Array} array of valid integration items. The array might be empty if `settings` object does not have valid integrations. + * @returns array of valid integration items. The array might be empty if `settings` object does not have valid integrations. */ export function validateConfigurableIntegrations(settings: { log: ILogger, integrations?: any }, validIntegrationTypes: string[] = []) { diff --git a/src/utils/settingsValidation/integrations/pluggable.ts b/src/utils/settingsValidation/integrations/pluggable.ts index b4a96ee9..f60cfce0 100644 --- a/src/utils/settingsValidation/integrations/pluggable.ts +++ b/src/utils/settingsValidation/integrations/pluggable.ts @@ -5,9 +5,9 @@ import { ILogger } from '../../../logger/types'; /** * This function validates `settings.integrations` object that consists of a list of pluggable integration factories. * - * @param {any} settings config object provided by the user to initialize the sdk + * @param settings - config object provided by the user to initialize the sdk * - * @returns {Array} array of valid integration factories. The array might be empty if `settings` object does not have valid integrations. + * @returns array of valid integration factories. The array might be empty if `settings` object does not have valid integrations. 
*/ export function validatePluggableIntegrations(settings: { log: ILogger, integrations?: any }): ISettings['integrations'] { diff --git a/src/utils/settingsValidation/localhost/__tests__/index.spec.ts b/src/utils/settingsValidation/localhost/__tests__/index.spec.ts deleted file mode 100644 index 663530dc..00000000 --- a/src/utils/settingsValidation/localhost/__tests__/index.spec.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { validateLocalhost } from '../pluggable'; -import { validateLocalhostWithDefault } from '../builtin'; -import { LocalhostFromObject } from '../../../../sync/offline/LocalhostFromObject'; -import { loggerMock as log } from '../../../../logger/__tests__/sdkLogger.mock'; - -const localhostModeObject = LocalhostFromObject(); - -describe('validateLocalhost, for slim SplitFactory', () => { - - afterEach(() => { - log.error.mockClear(); - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is invalid or not provided, returns undefined and logs an error', () => { - expect(validateLocalhost({ log, sync: {}, mode: 'localhost' })).toBe(undefined); - expect(validateLocalhost({ log, sync: { localhostMode: null }, mode: 'localhost' })).toBe(undefined); - expect(validateLocalhost({ log, sync: { localhostMode: () => { } }, mode: 'localhost' })).toBe(undefined); - expect(log.error).toBeCalledTimes(3); // logs error if provided object is invalid - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is valid, returns the provided object', () => { - expect(validateLocalhost({ log, sync: { localhostMode: localhostModeObject }, mode: 'localhost' })).toBe(localhostModeObject); - expect(log.error).not.toBeCalled(); - }); - - test('if mode is not LOCALHOST_MODE, returns the provided object (it is not validated)', () => { - expect(validateLocalhost({ log, sync: {}, mode: 'standalone' })).toBe(undefined); - expect(validateLocalhost({ log, sync: { localhostMode: 'INVALID_BUT_IGNORED' }, mode: 'standalone' })).toBe('INVALID_BUT_IGNORED'); - 
expect(log.error).not.toBeCalled(); - }); - -}); - -describe('validateLocalhostWithDefault, for full SplitFactory', () => { - - afterEach(() => { - log.error.mockClear(); - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is not provided, returns default without logging an error', () => { - expect(validateLocalhostWithDefault({ log, sync: {}, mode: 'localhost' })).toBe(localhostModeObject); - expect(validateLocalhostWithDefault({ log, sync: { localhostMode: null }, mode: 'localhost' })).toBe(localhostModeObject); - expect(log.error).not.toBeCalled(); - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is invalid, returns default and logs an error', () => { - expect(validateLocalhostWithDefault({ log, sync: { localhostMode: () => { } }, mode: 'localhost' })).toBe(localhostModeObject); - expect(log.error).toBeCalledTimes(1); // logs error if provided object is invalid - }); - - test('if mode is LOCALHOST_MODE and localhostMode object is valid, returns the provided object', () => { - expect(validateLocalhostWithDefault({ log, sync: { localhostMode: localhostModeObject }, mode: 'localhost' })).toBe(localhostModeObject); - expect(log.error).not.toBeCalled(); - }); - - test('if mode is not LOCALHOST_MODE, returns the provided object or the default one. 
Provided object is not validated and so no errors are logged', () => { - expect(validateLocalhostWithDefault({ log, sync: {}, mode: 'standalone' })).toBe(localhostModeObject); - expect(validateLocalhostWithDefault({ log, sync: { localhostMode: 'INVALID_BUT_IGNORED' }, mode: 'standalone' })).toBe('INVALID_BUT_IGNORED'); - expect(log.error).not.toBeCalled(); - }); - -}); diff --git a/src/utils/settingsValidation/localhost/builtin.ts b/src/utils/settingsValidation/localhost/builtin.ts deleted file mode 100644 index 35a30145..00000000 --- a/src/utils/settingsValidation/localhost/builtin.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { ILogger } from '../../../logger/types'; -import { SDKMode, } from '../../../types'; -import { LocalhostFromObject } from '../../../sync/offline/LocalhostFromObject'; -import { validateLocalhost } from './pluggable'; - -/** - * This function validates `settings.sync.localhostMode` object - * - * @param {any} settings config object provided by the user to initialize the sdk - * - * @returns {Object} provided localhost mode module at `settings.sync.localhostMode` if valid, or a default LocalhostFromObject instance if not provided or invalid - */ -export function validateLocalhostWithDefault(settings: { log: ILogger, sync: { localhostMode?: any }, mode: SDKMode }) { - if (!settings.sync.localhostMode) return LocalhostFromObject(); - return validateLocalhost(settings) || LocalhostFromObject(); -} diff --git a/src/utils/settingsValidation/localhost/pluggable.ts b/src/utils/settingsValidation/localhost/pluggable.ts deleted file mode 100644 index 3231e612..00000000 --- a/src/utils/settingsValidation/localhost/pluggable.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { ERROR_LOCALHOST_MODULE_REQUIRED } from '../../../logger/constants'; -import { ILogger } from '../../../logger/types'; -import { SDKMode, } from '../../../types'; -import { LOCALHOST_MODE } from '../../constants'; - -/** - * This function validates `settings.sync.localhostMode` object - * - * 
@param {any} settings config object provided by the user to initialize the sdk - * - * @returns {Object | undefined} provided localhost mode module at `settings.sync.localhostMode`, or undefined if it is not provided or invalid - */ -export function validateLocalhost(settings: { log: ILogger, sync: { localhostMode?: any}, mode: SDKMode }) { - const localhostMode = settings.sync.localhostMode; - - // localhostMode.type is used for internal validation. Not considered part of the public API, and might be updated eventually. - if (settings.mode === LOCALHOST_MODE && (typeof localhostMode !== 'function' || localhostMode.type !== 'LocalhostFromObject')) { - settings.log.error(ERROR_LOCALHOST_MODULE_REQUIRED); - return undefined; - } - return localhostMode; -} diff --git a/src/utils/settingsValidation/logger/builtinLogger.ts b/src/utils/settingsValidation/logger/builtinLogger.ts index 5db9cfb0..4f099c7c 100644 --- a/src/utils/settingsValidation/logger/builtinLogger.ts +++ b/src/utils/settingsValidation/logger/builtinLogger.ts @@ -3,11 +3,10 @@ import { ILogger } from '../../../logger/types'; import { isLocalStorageAvailable } from '../../env/isLocalStorageAvailable'; import { isNode } from '../../env/isNode'; import { codesDebug } from '../../../logger/messages/debug'; -import { _Map } from '../../lang/maps'; import { getLogLevel } from './commons'; -import { LogLevel } from '../../../types'; +import SplitIO from '../../../../types/splitio'; -const allCodes = new _Map(codesDebug); +const allCodes = new Map(codesDebug); // @TODO set default debug setting instead of initialLogLevel when integrating in JS and Node packages const LS_KEY = 'splitio_debug'; @@ -38,13 +37,13 @@ if (/^(enabled?|on)/i.test(initialState)) { /** * Validates the `debug` property at config and use it to set the log level. * - * @param settings user config object, with an optional `debug` property of type boolean or string log level. 
+ * @param settings - user config object, with an optional `debug` property of type boolean or string log level. * @returns a logger instance with the log level at `settings.debug`. If `settings.debug` is invalid or not provided, `initialLogLevel` is used. */ export function validateLogger(settings: { debug: unknown }): ILogger { const { debug } = settings; - const logLevel: LogLevel | undefined = debug !== undefined ? getLogLevel(debug) : initialLogLevel; + const logLevel: SplitIO.LogLevel | undefined = debug !== undefined ? getLogLevel(debug) : initialLogLevel; const log = new Logger({ logLevel: logLevel || initialLogLevel }, allCodes); diff --git a/src/utils/settingsValidation/logger/commons.ts b/src/utils/settingsValidation/logger/commons.ts index a51ba991..8c11cbbb 100644 --- a/src/utils/settingsValidation/logger/commons.ts +++ b/src/utils/settingsValidation/logger/commons.ts @@ -1,15 +1,15 @@ import { LogLevels, isLogLevelString } from '../../../logger'; -import { LogLevel } from '../../../types'; +import SplitIO from '../../../../types/splitio'; /** * Returns the LogLevel for the given debugValue or undefined if it is invalid, * i.e., if the debugValue is not a boolean or LogLevel string. 
* - * @param debugValue debug value at config + * @param debugValue - debug value at config * @returns LogLevel of the given debugValue or undefined if the provided value is invalid */ -export function getLogLevel(debugValue: unknown): LogLevel | undefined { +export function getLogLevel(debugValue: unknown): SplitIO.LogLevel | undefined { if (typeof debugValue === 'boolean') { if (debugValue) { return LogLevels.DEBUG; diff --git a/src/utils/settingsValidation/logger/pluggableLogger.ts b/src/utils/settingsValidation/logger/pluggableLogger.ts index 633d523a..063134c9 100644 --- a/src/utils/settingsValidation/logger/pluggableLogger.ts +++ b/src/utils/settingsValidation/logger/pluggableLogger.ts @@ -1,6 +1,6 @@ import { Logger, LogLevels } from '../../../logger'; import { ILogger } from '../../../logger/types'; -import { LogLevel } from '../../../types'; +import SplitIO from '../../../../types/splitio'; import { getLogLevel } from './commons'; function isLogger(log: any): log is ILogger { @@ -13,13 +13,13 @@ let initialLogLevel = LogLevels.NONE; /** * Validates the `debug` property at config and use it to set the logger. * - * @param settings user config object, with an optional `debug` property of type boolean, string log level or a Logger object. + * @param settings - user config object, with an optional `debug` property of type boolean, string log level or a Logger object. * @returns a logger instance, that might be: the provided logger at `settings.debug`, or one with the given `debug` log level, * or one with NONE log level if `debug` is not defined or invalid. 
*/ export function validateLogger(settings: { debug: unknown }): ILogger { const { debug } = settings; - let logLevel: LogLevel | undefined = initialLogLevel; + let logLevel: SplitIO.LogLevel | undefined = initialLogLevel; if (debug !== undefined) { if (isLogger(debug)) return debug; diff --git a/src/utils/settingsValidation/splitFilters.ts b/src/utils/settingsValidation/splitFilters.ts index db9207df..cea3117f 100644 --- a/src/utils/settingsValidation/splitFilters.ts +++ b/src/utils/settingsValidation/splitFilters.ts @@ -1,6 +1,6 @@ import { validateSplits } from '../inputValidation/splits'; import { ISplitFiltersValidation } from '../../dtos/types'; -import { SplitIO } from '../../types'; +import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { WARN_SPLITS_FILTER_IGNORED, WARN_SPLITS_FILTER_EMPTY, WARN_SPLITS_FILTER_INVALID, SETTINGS_SPLITS_FILTER, LOG_PREFIX_SETTINGS, ERROR_SETS_FILTER_EXCLUSIVE, WARN_LOWERCASE_FLAGSET, WARN_INVALID_FLAGSET, WARN_FLAGSET_NOT_CONFIGURED } from '../../logger/constants'; import { objectAssign } from '../lang/objectAssign'; @@ -40,9 +40,9 @@ function validateFilterType(maybeFilterType: any): maybeFilterType is SplitIO.Sp /** * Validate, deduplicate and sort a given list of filter values. * - * @param {string} type filter type string used for log messages - * @param {string[]} values list of values to validate, deduplicate and sort - * @param {number} maxLength + * @param type - filter type string used for log messages + * @param values - list of values to validate, deduplicate and sort + * @param maxLength - max length allowed for the list of values * @returns list of valid, unique and alphabetically sorted non-empty strings. The list is empty if `values` param is not a non-empty array or all its values are invalid. 
* * @throws Error if the sanitized list exceeds the length indicated by `maxLength` @@ -75,7 +75,7 @@ function validateSplitFilter(log: ILogger, type: SplitIO.SplitFilterType, values * - '&prefixes=': if only `byName` filter is undefined * - '&names=&prefixes=': if no one is undefined * - * @param {Object} groupedFilters object of filters. Each filter must be a list of valid, unique and ordered string values. + * @param groupedFilters - object of filters. Each filter must be a list of valid, unique and ordered string values. * @returns null or string with the `split filter query` component of the URL. */ function queryStringBuilder(groupedFilters: Record) { @@ -90,15 +90,12 @@ function queryStringBuilder(groupedFilters: Record & { version: string } & { startup: ISettings['startup'] }, /** If true, validates core.key */ acceptKey?: boolean, - /** If true, validates core.trafficType */ - acceptTT?: boolean, /** Define runtime values (`settings.runtime`) */ runtime: (settings: ISettings) => ISettings['runtime'], /** Storage validator (`settings.storage`) */ @@ -22,10 +20,8 @@ export interface ISettingsValidationParams { integrations?: (settings: ISettings) => ISettings['integrations'], /** Logger validator (`settings.debug`) */ logger: (settings: ISettings) => ISettings['log'], - /** Localhost mode validator (`settings.sync.localhostMode`) */ - localhost?: (settings: ISettings) => ISettings['sync']['localhostMode'], /** User consent validator (`settings.userConsent`) */ - consent: (settings: ISettings) => ISettings['userConsent'], + consent?: (settings: ISettings) => ISettings['userConsent'], /** Flag spec version validation. 
Configurable by the JS Synchronizer but not by the SDKs */ flagSpec?: (settings: ISettings) => ISettings['sync']['flagSpecVersion'] } diff --git a/src/utils/settingsValidation/url.ts b/src/utils/settingsValidation/url.ts index 4dd0179e..c483f930 100644 --- a/src/utils/settingsValidation/url.ts +++ b/src/utils/settingsValidation/url.ts @@ -9,9 +9,9 @@ const streamingEndpointMatcher = /^\/(sse|event-stream)/; * Get URL based on a given target (path). * ATM, it is only used for testing purposes. * - * @param settings settings object - * @param target url path - * @return complete url + * @param settings - settings object + * @param target - url path + * @returns complete url */ export function url(settings: ISettings, target: string) { if (telemetryEndpointMatcher.test(target)) { diff --git a/types/index.d.ts b/types/index.d.ts new file mode 100644 index 00000000..714efbf1 --- /dev/null +++ b/types/index.d.ts @@ -0,0 +1,5 @@ +// Declaration file for JavaScript Browser Split Software SDK +// Project: http://www.split.io/ +// Definitions by: Nico Zelaya + +/// diff --git a/types/splitio.d.ts b/types/splitio.d.ts new file mode 100644 index 00000000..a87dfc9b --- /dev/null +++ b/types/splitio.d.ts @@ -0,0 +1,1931 @@ +// Type definitions for Split Software SDKs +// Project: http://www.split.io/ + +import { RedisOptions } from 'ioredis'; +import { RequestOptions } from 'http'; + +export as namespace SplitIO; +export = SplitIO; + +/** + * Common settings properties. + */ +interface ISharedSettings { + /** + * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, + * which will check for the logImpression method. + * + * @defaultValue `undefined` + */ + impressionListener?: SplitIO.IImpressionListener; + /** + * SDK synchronization settings. + */ + sync?: { + /** + * List of feature flag filters. 
These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. + * This configuration is only meaningful when the SDK is working in "standalone" mode. + * + * Example: + * ``` + * splitFilter: [ + * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' + * ] + * ``` + */ + splitFilters?: SplitIO.SplitFilter[]; + /** + * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. + * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. + * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). + * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. + * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. + * + * @defaultValue `'OPTIMIZED'` + */ + impressionsMode?: SplitIO.ImpressionsMode; + /** + * Custom options object for HTTP(S) requests. + * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. + * This configuration has no effect in "consumer" mode, as no HTTP(S) requests are made by the SDK. + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different cases since headers are case-insensitive. 
Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, + * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and they will be ignored. + * + * @defaultValue `undefined` + * + * @param context - The context for the request, which contains the `headers` property object representing the current headers in the request. + * @returns An object representing a set of headers to be merged with the current headers. + * + * @example + * ``` + * const getHeaderOverrides = (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * }; + * ``` + */ + getHeaderOverrides?: (context: { headers: Record }) => Record; + }; + }; + /** + * List of URLs that the SDK will use as base for its synchronization functionalities, applicable only when running as standalone and partial consumer modes. + * Do not change these settings unless you're working on an advanced use case, like connecting to the Split proxy. + */ + urls?: SplitIO.UrlSettings; +} +/** + * Common settings properties for SDKs with synchronous API (standalone and localhost modes). + */ +interface ISyncSharedSettings extends ISharedSettings { + /** + * The SDK mode. When using the default in-memory storage or `InLocalStorage` as storage, the only possible value is "standalone", which is the default. + * For "localhost" mode, use "localhost" as authorizationKey. + * + * @defaultValue `'standalone'` + */ + mode?: 'standalone'; + /** + * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, + * the SDK would fall back to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. 
+ * + * @defaultValue `true` + */ + streamingEnabled?: boolean; + /** + * SDK synchronization settings. + */ + sync?: ISharedSettings['sync'] & { + /** + * Controls the SDK continuous synchronization flags. + * + * When `true` a running SDK will process rollout plan updates performed on the UI (default). + * When false it'll just fetch all data upon init. + * + * @defaultValue `true` + */ + enabled?: boolean; + }; +} +/** + * Common settings properties for SDKs with pluggable options. + */ +interface IPluggableSharedSettings { + /** + * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. + * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging}. + * + * Examples: + * ``` + * config.debug = true + * config.debug = 'WARN' + * config.debug = ErrorLogger() + * ``` + * + * @defaultValue `false` + */ + debug?: boolean | SplitIO.LogLevel | SplitIO.ILogger; + /** + * Defines an optional list of factory functions used to instantiate SDK integrations. + * + * NOTE: at the moment there are not integrations to plug in. + */ + integrations?: SplitIO.IntegrationFactory[]; +} +/** + * Common settings properties for SDKs without pluggable options. + */ +interface INonPluggableSharedSettings { + /** + * Boolean value to indicate whether the logger should be enabled or disabled, or a log level string. + * + * Examples: + * ``` + * config.debug = true + * config.debug = 'WARN' + * ``` + * + * @defaultValue `false` + */ + debug?: boolean | SplitIO.LogLevel; +} +/** + * Common settings properties for SDKs with server-side API. + */ +interface IServerSideSharedSettings { + /** + * SDK Core settings for NodeJS. + */ + core: { + /** + * Your SDK key. 
+ * + * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} + */ + authorizationKey: string; + /** + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * + * @defaultValue `true` + */ + labelsEnabled?: boolean; + /** + * Disable machine IP and Name from being sent to Split backend. + * + * @defaultValue `true` + */ + IPAddressesEnabled?: boolean; + }; + /** + * SDK Startup settings for NodeJS. + */ + startup?: { + /** + * Maximum amount of time used before notify a timeout. + * + * @defaultValue `15` + */ + readyTimeout?: number; + /** + * Time to wait for a request before the SDK is ready. If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. + * + * @defaultValue `15` + */ + requestTimeoutBeforeReady?: number; + /** + * How many quick retries we will do while starting up the SDK. + * + * @defaultValue `1` + */ + retriesOnFailureBeforeReady?: number; + /** + * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, + * to better control on browsers. This number defines that window before the first events push. + * + * @defaultValue `0` + */ + eventsFirstPushWindow?: number; + }; + /** + * SDK scheduler settings. + */ + scheduler?: { + /** + * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. + * + * @defaultValue `60` + */ + featuresRefreshRate?: number; + /** + * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. + * + * @defaultValue `300` + */ + impressionsRefreshRate?: number; + /** + * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. 
+ * If you use a 0 here, the queue will have no maximum size. + * + * @defaultValue `30000` + */ + impressionsQueueSize?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameter controls this metric flush period in seconds. + * + * @defaultValue `120` + * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. + */ + metricsRefreshRate?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameter controls this metric flush period in seconds. + * + * @defaultValue `3600` + */ + telemetryRefreshRate?: number; + /** + * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. + * + * @defaultValue `60` + */ + segmentsRefreshRate?: number; + /** + * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. + * + * @defaultValue `60` + */ + eventsPushRate?: number; + /** + * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * @defaultValue `500` + */ + eventsQueueSize?: number; + /** + * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. + * For more information see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * + * @defaultValue `15` + */ + offlineRefreshRate?: number; + /** + * When using streaming mode, seconds to wait before reattempting to connect for push notifications. + * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... + * + * @defaultValue `1` + */ + pushRetryBackoffBase?: number; + }; + /** + * Mocked features file path. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. 
+ * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} + * + * @defaultValue `'$HOME/.split'` + */ + features?: SplitIO.MockedFeaturesFilePath; +} +/** + * Common settings properties for SDKs with client-side API. + */ +interface IClientSideSharedSettings { + /** + * SDK Core settings for client-side. + */ + core: { + /** + * Your SDK key. + * + * @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} + */ + authorizationKey: string; + /** + * Customer identifier. Whatever this means to you. + * + * @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + */ + key: SplitIO.SplitKey; + /** + * Disable labels from being sent to Split backend. Labels may contain sensitive information. + * + * @defaultValue `true` + */ + labelsEnabled?: boolean; + }; + /** + * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. + * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. + * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. + * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends + * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. + * + * @defaultValue `'GRANTED'` + */ + userConsent?: SplitIO.ConsentStatus; +} +/** + * Common settings properties for SDKs with client-side and synchronous API (standalone and localhost modes). + */ +interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISyncSharedSettings { + /** + * Mocked features map. For testing purposes only. For using this you should specify "localhost" as authorizationKey on core settings. 
+ * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + */ + features?: SplitIO.MockedFeaturesMap; + /** + * SDK Startup settings. + */ + startup?: { + /** + * Maximum amount of time used before notify a timeout. + * + * @defaultValue `10` + */ + readyTimeout?: number; + /** + * Time to wait for a request before the SDK is ready. If this time expires, JS SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. + * + * @defaultValue `5` + */ + requestTimeoutBeforeReady?: number; + /** + * How many quick retries we will do while starting up the SDK. + * + * @defaultValue `1` + */ + retriesOnFailureBeforeReady?: number; + /** + * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, + * to better control on browsers or mobile. This number defines that window before the first events push. + * + * @defaultValue `10` + */ + eventsFirstPushWindow?: number; + }; + /** + * SDK scheduler settings. + */ + scheduler?: { + /** + * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. + * + * @defaultValue `60` + */ + featuresRefreshRate?: number; + /** + * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. + * + * @defaultValue `60` + */ + impressionsRefreshRate?: number; + /** + * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * @defaultValue `30000` + */ + impressionsQueueSize?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. 
+ * + * @defaultValue `120` + * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. + */ + metricsRefreshRate?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameter controls this metric flush period in seconds. + * + * @defaultValue `3600` + */ + telemetryRefreshRate?: number; + /** + * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. + * + * @defaultValue `60` + */ + segmentsRefreshRate?: number; + /** + * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. + * + * @defaultValue `60` + */ + eventsPushRate?: number; + /** + * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * @defaultValue `500` + */ + eventsQueueSize?: number; + /** + * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. + * For more information see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} + * + * @defaultValue `15` + */ + offlineRefreshRate?: number; + /** + * When using streaming mode, seconds to wait before reattempting to connect for push notifications. + * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... + * + * @defaultValue `1` + */ + pushRetryBackoffBase?: number; + }; +} + +/****** Exposed namespace ******/ +/** + * Shared types and interfaces for `@splitsoftware` packages, to support integrating JavaScript SDKs with TypeScript. + */ +declare namespace SplitIO { + + /** + * EventEmitter interface based on a subset of the NodeJS.EventEmitter methods. 
+ */ + interface IEventEmitter { + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; + off(event: string, listener: (...args: any[]) => void): this; + removeAllListeners(event?: string): this; + emit(event: string, ...args: any[]): boolean; + } + /** + * NodeJS.EventEmitter interface + * @see {@link https://nodejs.org/api/events.html} + */ + interface EventEmitter extends IEventEmitter { + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + off(event: string | symbol, listener: (...args: any[]) => void): this; + removeAllListeners(event?: string | symbol): this; + emit(event: string | symbol, ...args: any[]): boolean; + setMaxListeners(n: number): this; + getMaxListeners(): number; + listeners(event: string | symbol): Function[]; + rawListeners(event: string | symbol): Function[]; + listenerCount(type: string | symbol): number; + // Added in Node 6... + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + eventNames(): Array; + } + /** + * Event constants. + */ + type EventConsts = { + /** + * The ready event. + */ + SDK_READY: 'init::ready'; + /** + * The ready event when fired with cached data. + */ + SDK_READY_FROM_CACHE: 'init::cache-ready'; + /** + * The timeout event. + */ + SDK_READY_TIMED_OUT: 'init::timeout'; + /** + * The update event. + */ + SDK_UPDATE: 'state::update'; + }; + /** + * SDK Modes. 
+ */ + type SDKMode = 'standalone' | 'localhost' | 'consumer' | 'consumer_partial'; + /** + * Storage types. + */ + type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; + /** + * Settings interface. This is a representation of the settings the SDK exposes, that's why + * most of its props are readonly. Only features should be rewritten when localhost mode is active. + */ + interface ISettings { + readonly core: { + authorizationKey: string; + key: SplitKey; + labelsEnabled: boolean; + IPAddressesEnabled: boolean; + }; + readonly mode: SDKMode; + readonly scheduler: { + featuresRefreshRate: number; + impressionsRefreshRate: number; + impressionsQueueSize: number; + /** + * @deprecated Use `telemetryRefreshRate` instead. + */ + metricsRefreshRate?: number; + telemetryRefreshRate: number; + segmentsRefreshRate: number; + offlineRefreshRate: number; + eventsPushRate: number; + eventsQueueSize: number; + pushRetryBackoffBase: number; + }; + readonly startup: { + readyTimeout: number; + requestTimeoutBeforeReady: number; + retriesOnFailureBeforeReady: number; + eventsFirstPushWindow: number; + }; + readonly storage: StorageSyncFactory | StorageAsyncFactory | StorageOptions; + readonly urls: { + events: string; + sdk: string; + auth: string; + streaming: string; + telemetry: string; + }; + readonly integrations?: IntegrationFactory[]; + readonly debug: boolean | LogLevel | ILogger; + readonly version: string; + /** + * Mocked features map if using in client-side, or mocked features file path string if using in server-side (NodeJS). 
+ */
+ features: MockedFeaturesMap | MockedFeaturesFilePath;
+ readonly streamingEnabled: boolean;
+ readonly sync: {
+ splitFilters: SplitFilter[];
+ impressionsMode: ImpressionsMode;
+ enabled: boolean;
+ flagSpecVersion: string;
+ requestOptions?: {
+ getHeaderOverrides?: (context: { headers: Record<string, string> }) => Record<string, string>;
+ };
+ };
+ readonly runtime: {
+ ip: string | false;
+ hostname: string | false;
+ };
+ readonly impressionListener?: IImpressionListener;
+ /**
+ * User consent status if using in client-side. Undefined if using in server-side (NodeJS).
+ */
+ readonly userConsent?: ConsentStatus;
+ }
+ /**
+ * Log levels.
+ */
+ type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE';
+ /**
+ * Logger API
+ */
+ interface ILoggerAPI {
+ /**
+ * Enables SDK logging to the console.
+ */
+ enable(): void;
+ /**
+ * Disables SDK logging.
+ */
+ disable(): void;
+ /**
+ * Sets a log level for the SDK logs.
+ */
+ setLogLevel(logLevel: LogLevel): void;
+ /**
+ * Log level constants. Use this to pass them to setLogLevel function.
+ */
+ LogLevel: {
+ [level in LogLevel]: LogLevel;
+ };
+ }
+ /**
+ * User consent API
+ */
+ interface IUserConsentAPI {
+ /**
+ * Sets or updates the user consent status. Possible values are `true` and `false`, which represent user consent `'GRANTED'` and `'DECLINED'` respectively.
+ * - `true ('GRANTED')`: the user has granted consent for tracking events and impressions. The SDK will send them to Split cloud.
+ * - `false ('DECLINED')`: the user has declined consent for tracking events and impressions. The SDK will not send them to Split cloud.
+ *
+ * NOTE: calling this method updates the user consent at a factory level, affecting all clients of the same factory.
+ *
+ * @param userConsent - The user consent status, true for 'GRANTED' and false for 'DECLINED'.
+ * @returns Whether the provided param is a valid value (i.e., a boolean value) or not.
+ */
+ setStatus(userConsent: boolean): boolean;
+ /**
+ * Gets the user consent status.
+ *
+ * @returns The user consent status.
+ */
+ getStatus(): ConsentStatus;
+ /**
+ * Consent status constants. Use this to compare with the getStatus function result.
+ */
+ Status: {
+ [status in ConsentStatus]: ConsentStatus;
+ };
+ }
+ /**
+ * Common API for entities that expose status handlers.
+ */
+ interface IStatusInterface extends EventEmitter {
+ /**
+ * Constant object containing the SDK events for you to use.
+ */
+ Event: EventConsts;
+ /**
+ * Returns a promise that resolves once the SDK has finished loading (`SDK_READY` event emitted) or rejected if the SDK has timedout (`SDK_READY_TIMED_OUT` event emitted).
+ * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, the `ready` method will return a resolved promise once the SDK is ready.
+ *
+ * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises.
+ * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example:
+ * ```
+ * try {
+ * await client.ready().catch((e) => { throw e; });
+ * // SDK is ready
+ * } catch(e) {
+ * // SDK has timedout
+ * }
+ * ```
+ *
+ * @returns A promise that resolves once the SDK is ready or rejects if the SDK has timedout.
+ */
+ ready(): Promise<void>;
+ }
+ /**
+ * Common definitions between clients for different environments interface.
+ */
+ interface IBasicClient extends IStatusInterface {
+ /**
+ * Destroys the client instance.
+ *
+ * In 'standalone' and 'partial consumer' modes, this method will flush any pending impressions and events.
+ * In 'standalone' mode, it also stops the synchronization of feature flag definitions with the backend.
+ * In 'consumer' and 'partial consumer' modes, this method also disconnects the SDK from the Pluggable storage.
+ *
+ * @returns A promise that resolves once the client is destroyed.
+ */
+ destroy(): Promise<void>;
+ }
+ /**
+ * Common definitions between SDK instances for different environments interface.
+ */
+ interface IBasicSDK {
+ /**
+ * Current settings of the SDK instance.
+ */
+ settings: ISettings;
+ /**
+ * Logger API.
+ */
+ Logger: ILoggerAPI;
+ /**
+ * Destroys all the clients created by this factory.
+ *
+ * @returns A promise that resolves once all clients are destroyed.
+ */
+ destroy(): Promise<void>;
+ }
+ /**
+ * Feature flag treatment value, returned by getTreatment.
+ */
+ type Treatment = string;
+ /**
+ * Feature flag treatment promise that resolves to actual treatment value.
+ */
+ type AsyncTreatment = Promise<Treatment>;
+ /**
+ * An object with the treatments for a bulk of feature flags, returned by getTreatments. For example:
+ * ```
+ * {
+ * feature1: 'on',
+ * feature2: 'off'
+ * }
+ * ```
+ */
+ type Treatments = {
+ [featureName: string]: Treatment;
+ };
+ /**
+ * Feature flag treatments promise that resolves to the actual SplitIO.Treatments object.
+ */
+ type AsyncTreatments = Promise<Treatments>;
+ /**
+ * Feature flag evaluation result with treatment and configuration, returned by getTreatmentWithConfig.
+ */
+ type TreatmentWithConfig = {
+ /**
+ * The treatment string.
+ */
+ treatment: string;
+ /**
+ * The stringified version of the JSON config defined for that treatment, null if there is no config for the resulting treatment.
+ */
+ config: string | null;
+ };
+ /**
+ * Feature flag treatment promise that resolves to actual SplitIO.TreatmentWithConfig object.
+ */
+ type AsyncTreatmentWithConfig = Promise<TreatmentWithConfig>;
+ /**
+ * An object with the treatments with configs for a bulk of feature flags, returned by getTreatmentsWithConfig.
+ * Each existing configuration is a stringified version of the JSON you defined on the Split user interface.
For example:
+ * ```
+ * {
+ * feature1: { treatment: 'on', config: null }
+ * feature2: { treatment: 'off', config: '{"bannerText":"Click here."}' }
+ * }
+ * ```
+ */
+ type TreatmentsWithConfig = {
+ [featureName: string]: TreatmentWithConfig;
+ };
+ /**
+ * Feature flag treatments promise that resolves to the actual SplitIO.TreatmentsWithConfig object.
+ */
+ type AsyncTreatmentsWithConfig = Promise<TreatmentsWithConfig>;
+ /**
+ * Possible Split SDK events.
+ */
+ type Event = 'init::timeout' | 'init::ready' | 'init::cache-ready' | 'state::update';
+ /**
+ * Attributes should be on object with values of type string, boolean, number (dates should be sent as millis since epoch) or array of strings or numbers.
+ *
+ * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#attribute-syntax}
+ */
+ type Attributes = {
+ [attributeName: string]: AttributeType;
+ };
+ /**
+ * Type of an attribute value
+ */
+ type AttributeType = string | number | boolean | Array<string | number>;
+ /**
+ * Properties should be an object with values of type string, number, boolean or null. Size limit of ~31kb.
+ *
+ * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#track}
+ */
+ type Properties = {
+ [propertyName: string]: string | number | boolean | null;
+ };
+ /**
+ * The SplitKey object format.
+ */
+ type SplitKeyObject = {
+ matchingKey: string;
+ bucketingKey: string;
+ };
+ /**
+ * The customer identifier. Could be a SplitKeyObject or a string.
+ */
+ type SplitKey = SplitKeyObject | string;
+ /**
+ * Path to file with mocked features (for node).
+ */
+ type MockedFeaturesFilePath = string;
+ /**
+ * Object with mocked features mapping for client-side (e.g., Browser or React Native). We need to specify the featureName as key, and the mocked treatment as value.
+ */
+ type MockedFeaturesMap = {
+ [featureName: string]: string | TreatmentWithConfig;
+ };
+ /**
+ * Impression DTO generated by the SDK when processing evaluations.
+ */
+ type ImpressionDTO = {
+ feature: string;
+ keyName: string;
+ treatment: string;
+ time: number;
+ bucketingKey?: string;
+ label: string;
+ changeNumber: number;
+ pt?: number;
+ }
+ /**
+ * Object with information about an impression. It contains the generated impression DTO as well as
+ * complementary information around where and how it was generated in that way.
+ */
+ type ImpressionData = {
+ impression: ImpressionDTO;
+ attributes?: Attributes;
+ ip: string | false;
+ hostname: string | false;
+ sdkLanguageVersion: string;
+ };
+ /**
+ * Data corresponding to one feature flag view.
+ */
+ type SplitView = {
+ /**
+ * The name of the feature flag.
+ */
+ name: string;
+ /**
+ * The traffic type of the feature flag.
+ */
+ trafficType: string;
+ /**
+ * Whether the feature flag is killed or not.
+ */
+ killed: boolean;
+ /**
+ * The list of treatments available for the feature flag.
+ */
+ treatments: Array<string>;
+ /**
+ * Current change number of the feature flag.
+ */
+ changeNumber: number;
+ /**
+ * Map of configurations per treatment.
+ * Each existing configuration is a stringified version of the JSON you defined on the Split user interface.
+ */
+ configs: {
+ [treatmentName: string]: string;
+ };
+ /**
+ * List of sets of the feature flag.
+ */
+ sets: string[];
+ /**
+ * The default treatment of the feature flag.
+ */
+ defaultTreatment: string;
+ };
+ /**
+ * A promise that resolves to a feature flag view or null if the feature flag is not found.
+ */
+ type SplitViewAsync = Promise<SplitView | null>;
+ /**
+ * An array containing the SplitIO.SplitView elements.
+ */
+ type SplitViews = Array<SplitView>;
+ /**
+ * A promise that resolves to an SplitIO.SplitViews array.
+ */
+ type SplitViewsAsync = Promise<SplitViews>;
+ /**
+ * An array of feature flag names.
+ */
+ type SplitNames = Array<string>;
+ /**
+ * A promise that resolves to an array of feature flag names.
+ */
+ type SplitNamesAsync = Promise<SplitNames>;
+ /**
+ * Storage for synchronous (standalone) SDK.
+ * Its interface details are not part of the public API. + */ + type StorageSync = any; + /** + * Storage builder for synchronous (standalone) SDK. + * Input parameter details are not part of the public API. + */ + type StorageSyncFactory = { + readonly type: StorageType; + (params: any): (StorageSync | undefined); + } + /** + * Configuration params for `InLocalStorage` + */ + type InLocalStorageOptions = { + /** + * Optional prefix to prevent any kind of data collision when having multiple factories using the same storage type. + * + * @defaultValue `'SPLITIO'` + */ + prefix?: string; + } + /** + * Storage for asynchronous (consumer) SDK. + * Its interface details are not part of the public API. + */ + type StorageAsync = any + /** + * Storage builder for asynchronous (consumer) SDK. + * Input parameter details are not part of the public API. + */ + type StorageAsyncFactory = { + readonly type: StorageType; + (params: any): StorageAsync; + } + /** + * Configuration params for `PluggableStorage` + */ + type PluggableStorageOptions = { + /** + * Optional prefix to prevent any kind of data collision when having multiple factories using the same storage wrapper. + * + * @defaultValue `'SPLITIO'` + */ + prefix?: string; + /** + * Storage wrapper. + */ + wrapper: Object; + } + /** + * Synchronous storage valid types for NodeJS. + */ + type NodeSyncStorage = 'MEMORY'; + /** + * Asynchronous storages valid types for NodeJS. + */ + type NodeAsyncStorage = 'REDIS'; + /** + * Storage valid types for the browser. + */ + type BrowserStorage = 'MEMORY' | 'LOCALSTORAGE'; + /** + * Storage options for the SDK with no pluggable storage. + */ + type StorageOptions = { + type: NodeSyncStorage | NodeAsyncStorage | BrowserStorage; + prefix?: string; + options?: Object; + } + /** + * Impression listener interface. This is the interface that needs to be implemented + * by the element you provide to the SDK as impression listener. 
+ *
+ * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#listener}
+ */
+ interface IImpressionListener {
+ logImpression(data: ImpressionData): void;
+ }
+ /**
+ * SDK integration instance.
+ * Its interface details are not part of the public API.
+ */
+ type Integration = any;
+ /**
+ * SDK integration factory.
+ * By returning an integration, the SDK will queue events and impressions into it.
+ * Input parameter details are not part of the public API.
+ */
+ type IntegrationFactory = {
+ readonly type: string;
+ (params: any): (Integration | void);
+ }
+ /**
+ * A pair of user key and its trafficType, required for tracking valid Split events.
+ */
+ type Identity = {
+ /**
+ * The user key.
+ */
+ key: string;
+ /**
+ * The key traffic type.
+ */
+ trafficType: string;
+ };
+ /**
+ * Object with information about a Split event.
+ */
+ type EventData = {
+ eventTypeId: string;
+ value?: number;
+ properties?: Properties;
+ trafficTypeName?: string;
+ key?: string;
+ timestamp: number;
+ };
+ /**
+ * Object representing the data sent by Split (events and impressions).
+ */
+ type IntegrationData = {
+ /**
+ * The type of Split data.
+ */
+ type: 'IMPRESSION';
+ /**
+ * The impression data.
+ */
+ payload: ImpressionData;
+ } | {
+ /**
+ * The type of Split data.
+ */
+ type: 'EVENT';
+ /**
+ * The event data.
+ */
+ payload: EventData;
+ };
+ /**
+ * Available URL settings for the SDKs.
+ */
+ type UrlSettings = {
+ /**
+ * String property to override the base URL where the SDK will get rollout plan related data, like feature flags and segments definitions.
+ *
+ * @defaultValue `'https://sdk.split.io/api'`
+ */
+ sdk?: string;
+ /**
+ * String property to override the base URL where the SDK will post event-related information like impressions.
+ * + * @defaultValue `'https://events.split.io/api'` + */ + events?: string; + /** + * String property to override the base URL where the SDK will get authorization tokens to be used with functionality that requires it, like streaming. + * + * @defaultValue `'https://auth.split.io/api'` + */ + auth?: string; + /** + * String property to override the base URL where the SDK will connect to receive streaming updates. + * + * @defaultValue `'https://streaming.split.io'` + */ + streaming?: string; + /** + * String property to override the base URL where the SDK will post telemetry data. + * + * @defaultValue `'https://telemetry.split.io/api'` + */ + telemetry?: string; + }; + + /** + * SplitFilter type. + */ + type SplitFilterType = 'bySet' | 'byName' | 'byPrefix'; + /** + * Defines a feature flag filter, described by a type and list of values. + */ + interface SplitFilter { + /** + * Type of the filter. + */ + type: SplitFilterType; + /** + * List of values: feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. + */ + values: string[]; + } + /** + * ImpressionsMode type + */ + type ImpressionsMode = 'OPTIMIZED' | 'DEBUG' | 'NONE'; + /** + * User consent status. + */ + type ConsentStatus = 'GRANTED' | 'DECLINED' | 'UNKNOWN'; + /** + * Logger. Its interface details are not part of the public API. It shouldn't be used directly. + */ + interface ILogger { + setLogLevel(logLevel: LogLevel): void; + } + /** + * Settings interface for Browser SDK instances created with client-side API and synchronous storage (e.g., in-memory or local storage). + * + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} + */ + interface IClientSideSettings extends IClientSideSyncSharedSettings, IPluggableSharedSettings { + /** + * Defines the factory function to instantiate the storage. If not provided, the default in-memory storage is used. 
+ * + * NOTE: Currently, there is no persistent storage option available for the React Native SDK; only `InLocalStorage` for the Browser SDK. + * + * Example: + * ``` + * SplitFactory({ + * ... + * storage: InLocalStorage() + * }) + * ``` + */ + storage?: StorageSyncFactory; + } + /** + * Settings interface for React Native SDK instances, with client-side API and synchronous storage. + * + * @see {@link https://help.split.io/hc/en-us/articles/4406066357901-React-Native-SDK#configuration} + */ + interface IReactNativeSettings extends IClientSideSettings { } + /** + * Settings interface for Browser SDK instances created with client-side API and asynchronous storage (e.g., serverless environments with a persistent storage). + * If your storage is synchronous (by default we use memory, which is sync) use SplitIO.IClientSideSettings instead. + * + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage} + */ + interface IClientSideAsyncSettings extends IClientSideSharedSettings, ISharedSettings, IPluggableSharedSettings { + /** + * The SDK mode. When using `PluggableStorage` as storage, the possible values are "consumer" and "consumer_partial". + * + * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage} + */ + mode: 'consumer' | 'consumer_partial'; + /** + * Defines the factory function to instantiate the storage. + * + * Example: + * ``` + * SplitFactory({ + * ... + * storage: PluggableStorage({ wrapper: SomeWrapper }) + * }) + * ``` + */ + storage: StorageAsyncFactory; + /** + * SDK Startup settings. + */ + startup?: { + /** + * Maximum amount of time used before notify a timeout. + * + * @defaultValue `5` + */ + readyTimeout?: number; + /** + * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, + * to better control on browsers or mobile. 
This number defines that window before the first events push. + * + * NOTE: this param is ignored in 'consumer' mode. + * + * @defaultValue `10` + */ + eventsFirstPushWindow?: number; + }; + /** + * SDK scheduler settings. + */ + scheduler?: { + /** + * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. + * + * NOTE: this param is ignored in 'consumer' mode. + * + * @defaultValue `60` + */ + impressionsRefreshRate?: number; + /** + * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * NOTE: this param is ignored in 'consumer' mode. + * + * @defaultValue `30000` + */ + impressionsQueueSize?: number; + /** + * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. + * + * NOTE: this param is ignored in 'consumer' mode. + * + * @defaultValue `3600` + */ + telemetryRefreshRate?: number; + /** + * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. + * + * NOTE: this param is ignored in 'consumer' mode. + * + * @defaultValue `60` + */ + eventsPushRate?: number; + /** + * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. + * If you use a 0 here, the queue will have no maximum size. + * + * NOTE: this param is ignored in 'consumer' mode. + * + * @defaultValue `500` + */ + eventsQueueSize?: number; + }; + } + /** + * Settings interface for JavaScript SDK instances created on the browser, with client-side API and synchronous storage (e.g., in-memory or local storage). 
+ * + * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} + */ + interface IBrowserSettings extends IClientSideSyncSharedSettings, INonPluggableSharedSettings { + /** + * Defines which kind of storage we can instantiate on the browser. + * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. + */ + storage?: { + /** + * Storage type to be instantiated by the SDK. + * + * @defaultValue `'MEMORY'` + */ + type?: BrowserStorage; + /** + * Optional prefix to prevent any kind of data collision between SDK versions. + * + * @defaultValue `'SPLITIO'` + */ + prefix?: string; + }; + } + /** + * Settings interface for JavaScript SDK instances created on NodeJS, with server-side API and synchronous in-memory storage. + * If your storage is asynchronous (Redis for example) use SplitIO.INodeAsyncSettings instead. + * + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} + */ + interface INodeSettings extends IServerSideSharedSettings, ISyncSharedSettings, INonPluggableSharedSettings { + /** + * Defines which kind of storage we can instantiate on NodeJS for 'standalone' mode. + * The only possible storage type is 'MEMORY', which is the default. + */ + storage?: { + /** + * Synchronous storage type to be instantiated by the SDK. + * + * @defaultValue `'MEMORY'` + */ + type?: NodeSyncStorage; + /** + * Optional prefix to prevent any kind of data collision between SDK versions. + * + * @defaultValue `'SPLITIO'` + */ + prefix?: string; + }; + sync?: ISyncSharedSettings['sync'] & { + /** + * Custom options object for HTTP(S) requests in NodeJS. + * If provided, this object is merged with the options object passed by the SDK for EventSource and Node-Fetch calls. + * @see {@link https://www.npmjs.com/package/node-fetch#options} + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. 
+ * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden.
+ * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }`
+ * Or provide keys with different cases since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }`
+ *
+ * @defaultValue `undefined`
+ *
+ * @param context - The context for the request, which contains the `headers` property object representing the current headers in the request.
+ * @returns An object representing a set of headers to be merged with the current headers.
+ *
+ * @example
+ * ```
+ * const getHeaderOverrides = (context) => {
+ * return {
+ * 'Authorization': context.headers['Authorization'] + ', other-value',
+ * 'custom-header': 'custom-value'
+ * };
+ * };
+ * ```
+ */
+ getHeaderOverrides?: (context: { headers: Record<string, string> }) => Record<string, string>;
+ /**
+ * Custom NodeJS HTTP(S) Agent used by the SDK for HTTP(S) requests.
+ *
+ * You can use it, for example, for certificate pinning or setting a network proxy:
+ *
+ * ```
+ * const { HttpsProxyAgent } = require('https-proxy-agent');
+ *
+ * const proxyAgent = new HttpsProxyAgent(process.env.HTTPS_PROXY || 'http://10.10.1.10:1080');
+ *
+ * const factory = SplitFactory({
+ * ...
+ * sync: {
+ * requestOptions: {
+ * agent: proxyAgent
+ * }
+ * }
+ * })
+ * ```
+ *
+ * @see {@link https://nodejs.org/api/https.html#class-httpsagent}
+ *
+ * @defaultValue `undefined`
+ */
+ agent?: RequestOptions['agent'];
+ };
+ };
+ }
+ /**
+ * Settings interface for JavaScript SDK instances created on NodeJS, with asynchronous storage like Redis.
+ * If your storage is synchronous (by default we use memory, which is sync) use SplitIO.INodeSettings instead.
+ * + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} + */ + interface INodeAsyncSettings extends IServerSideSharedSettings, ISharedSettings, INonPluggableSharedSettings { + /** + * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. + * + * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#state-sharing-redis-integration} + */ + mode: 'consumer'; + /** + * Defines which kind of async storage we can instantiate on NodeJS for 'consumer' mode. + * The only possible storage type is 'REDIS'. + */ + storage: { + /** + * 'REDIS' storage type to be instantiated by the SDK. + */ + type: NodeAsyncStorage; + /** + * Options to be passed to the Redis storage. Use it with storage type: 'REDIS'. + */ + options?: { + /** + * Redis URL. If set, `host`, `port`, `db` and `pass` params will be ignored. + * + * Examples: + * ``` + * url: 'localhost' + * url: '127.0.0.1:6379' + * url: 'redis://:authpassword@127.0.0.1:6379/0' + * ``` + */ + url?: string; + /** + * Redis host. + * + * @defaultValue `'localhost'` + */ + host?: string; + /** + * Redis port. + * + * @defaultValue `6379` + */ + port?: number; + /** + * Redis database to be used. + * + * @defaultValue `0` + */ + db?: number; + /** + * Redis password. Don't define if no password is used. + * + * @defaultValue `undefined` + */ + pass?: string; + /** + * The milliseconds before a timeout occurs during the initial connection to the Redis server. + * + * @defaultValue `10000` + */ + connectionTimeout?: number; + /** + * The milliseconds before Redis commands are timeout by the SDK. + * Method calls that involve Redis commands, like `client.getTreatment` or `client.track` calls, are resolved when the commands success or timeout. + * + * @defaultValue `5000` + */ + operationTimeout?: number; + /** + * TLS configuration for Redis connection. 
+ * @see {@link https://www.npmjs.com/package/ioredis#tls-options } + * + * @defaultValue `undefined` + */ + tls?: RedisOptions['tls']; + }; + /** + * Optional prefix to prevent any kind of data collision between SDK versions. + * + * @defaultValue `'SPLITIO'` + */ + prefix?: string; + }; + } + /** + * This represents the interface for the SDK instance with synchronous storage and client-side API, + * i.e., where client instances have a bound user key. + */ + interface IBrowserSDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK, associated with the key provided on settings. + * + * @returns The client instance. + */ + client(): IBrowserClient; + /** + * Returns a shared client of the SDK, associated with the given key. + * @param key - The key for the new client instance. + * @returns The client instance. + */ + client(key: SplitKey): IBrowserClient; + /** + * Returns a manager instance of the SDK to explore available information. + * + * @returns The manager instance. + */ + manager(): IManager; + /** + * User consent API. + */ + UserConsent: IUserConsentAPI; + } + /** + * This represents the interface for the SDK instance with asynchronous storage and client-side API, + * i.e., where client instances have a bound user key. + */ + interface IBrowserAsyncSDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK, associated with the key provided on settings. + * + * @returns The asynchronous client instance. + */ + client(): IBrowserAsyncClient; + /** + * Returns a shared client of the SDK, associated with the given key. + * + * @param key - The key for the new client instance. + * @returns The asynchronous client instance. + */ + client(key: SplitKey): IBrowserAsyncClient; + /** + * Returns a manager instance of the SDK to explore available information. + * + * @returns The manager instance. + */ + manager(): IAsyncManager; + /** + * User consent API. 
+ */ + UserConsent: IUserConsentAPI; + } + /** + * This represents the interface for the SDK instance for server-side with synchronous storage. + */ + interface ISDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK. + * + * @returns The client instance. + */ + client(): IClient; + /** + * Returns a manager instance of the SDK to explore available information. + * + * @returns The manager instance. + */ + manager(): IManager; + } + /** + * This represents the interface for the SDK instance for server-side with asynchronous storage. + */ + interface IAsyncSDK extends IBasicSDK { + /** + * Returns the default client instance of the SDK. + * + * @returns The asynchronous client instance. + */ + client(): IAsyncClient; + /** + * Returns a manager instance of the SDK to explore available information. + * + * @returns The manager instance. + */ + manager(): IAsyncManager; + } + /** + * This represents the interface for the Client instance on server-side, where the user key is not bound to the instance and must be provided on each method call. + * This interface is available in NodeJS, or when importing the 'server' sub-package of JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). + */ + interface IClient extends IBasicClient { + /** + * Returns a Treatment value, which is the treatment string for the given feature. + * + * @param key - The string key representing the consumer. + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The treatment string. + */ + getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment; + /** + * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. + * + * @param key - The string key representing the consumer. 
+ * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The TreatmentWithConfig, the object containing the treatment string and the + * configuration stringified JSON (or null if there was no config for that treatment). + */ + getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; + /** + * Returns a Treatments value, which is an object map with the treatments for the given features. + * + * @param key - The string key representing the consumer. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The treatments object map. + */ + getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments; + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * + * @param key - The string key representing the consumer. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; + /** + * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * + * @param key - The string key representing the consumer. + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. 
+ * @returns The map with all the Treatment objects
+ */
+ getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments;
+ /**
+ * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set.
+ *
+ * @param key - The string key representing the consumer.
+ * @param flagSet - The flag set name we want to get the treatments.
+ * @param attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns The map with all the TreatmentWithConfig objects
+ */
+ getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig;
+ /**
+ * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets.
+ *
+ * @param key - The string key representing the consumer.
+ * @param flagSets - An array of the flag set names we want to get the treatments.
+ * @param attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns The map with all the Treatment objects
+ */
+ getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments;
+ /**
+ * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets.
+ *
+ * @param key - The string key representing the consumer.
+ * @param flagSets - An array of the flag set names we want to get the treatments.
+ * @param attributes - An object of type Attributes defining the attributes for the given key.
+ * @returns The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig; + /** + * Tracks an event to be fed to the results product on Split user interface. + * + * @param key - The key that identifies the entity related to this event. + * @param trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @param eventType - The event type corresponding to this event. + * @param value - The value of this event. + * @param properties - The properties of this event. Values can be string, number, boolean or null. + * @returns Whether the event was added to the queue successfully or not. + */ + track(key: SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; + } + /** + * This represents the interface for the Client instance on server-side with asynchronous storage, like REDIS. + * User key is not bound to the instance and must be provided on each method call, which returns a promise. + * This interface is available in NodeJS, or when importing the 'server' sub-package in JS SDK (e.g., `import { SplitFactory } from '@splitsoftware/splitio/server'`). + */ + interface IAsyncClient extends IBasicClient { + /** + * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. + * + * @param key - The string key representing the consumer. + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatment promise that resolves to the treatment string. 
+ */ + getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment; + /** + * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. + * + * @param key - The string key representing the consumer. + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. + */ + getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig; + /** + * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. + * + * @param key - The string key representing the consumer. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. + */ + getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments; + /** + * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * + * @param key - The string key representing the consumer. + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. 
+ */ + getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; + /** + * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * + * @param key - The string key representing the consumer. + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. + */ + getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments; + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. + * + * @param key - The string key representing the consumer. + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. + */ + getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig; + /** + * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. + * + * @param key - The string key representing the consumer. + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. 
+ */ + getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments; + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. + * + * @param key - The string key representing the consumer. + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. + */ + getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; + /** + * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). + * + * @param key - The key that identifies the entity related to this event. + * @param trafficType - The traffic type of the entity related to this event. See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @param eventType - The event type corresponding to this event. + * @param value - The value of this event. + * @param properties - The properties of this event. Values can be string, number, boolean or null. + * @returns A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. + */ + track(key: SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; + } + interface IClientWithAttributes extends IBasicClient { + /** + * Add an attribute to client's in-memory attributes storage. 
+ * + * @param attributeName - Attribute name + * @param attributeValue - Attribute value + * @returns true if the attribute was stored and false otherwise + */ + setAttribute(attributeName: string, attributeValue: AttributeType): boolean; + /** + * Returns the attribute with the given name. + * + * @param attributeName - Attribute name + * @returns Attribute with the given name + */ + getAttribute(attributeName: string): AttributeType; + /** + * Removes from client's in-memory attributes storage the attribute with the given name. + * + * @param attributeName - Attribute name + * @returns true if attribute was removed and false otherwise + */ + removeAttribute(attributeName: string): boolean; + /** + * Add to client's in-memory attributes storage the attributes in 'attributes'. + * + * @param attributes - Object with attributes to store + * @returns true if attributes were stored an false otherwise + */ + setAttributes(attributes: Attributes): boolean; + /** + * Return all the attributes stored in client's in-memory attributes storage. + * + * @returns returns all the stored attributes + */ + getAttributes(): Attributes; + /** + * Remove all the stored attributes in the client's in-memory attribute storage. + * + * @returns true if all attribute were removed and false otherwise + */ + clearAttributes(): boolean; + } + /** + * This represents the interface for the Client instance on client-side, where the user key is bound to the instance on creation and does not need to be provided on each method call. + */ + interface IBrowserClient extends IClientWithAttributes { + /** + * Returns a Treatment value, which is the treatment string for the given feature. + * + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The treatment string. 
+ */ + getTreatment(featureFlagName: string, attributes?: Attributes): Treatment; + /** + * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. + * + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map containing the treatment and the configuration stringified JSON (or null if there was no config for that treatment). + */ + getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig; + /** + * Returns a Treatments value, which is an object map with the treatments for the given features. + * + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The treatments object map. + */ + getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments; + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig; + /** + * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. 
+ * @returns The map with all the Treatments objects + */ + getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments; + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. + * + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig; + /** + * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. + * + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the Treatments objects + */ + getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments; + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. + * + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns The map with all the TreatmentWithConfig objects + */ + getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig; + /** + * Tracks an event to be fed to the results product on Split user interface. + * + * @param trafficType - The traffic type of the entity related to this event. 
See {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} + * @param eventType - The event type corresponding to this event. + * @param value - The value of this event. + * @param properties - The properties of this event. Values can be string, number, boolean or null. + * @returns Whether the event was added to the queue successfully or not. + */ + track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; + } + /** + * This represents the interface for the Client instance with asynchronous storage for client-side SDK, where each client has associated a key. + */ + interface IBrowserAsyncClient extends IClientWithAttributes { + /** + * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. + * + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatment promise that resolves to the treatment string. + */ + getTreatment(featureFlagName: string, attributes?: Attributes): AsyncTreatment; + /** + * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. + * + * @param featureFlagName - The string that represents the feature flag we want to get the treatment. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. + */ + getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig; + /** + * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. + * + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. 
+ * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. + */ + getTreatments(featureFlagNames: string[], attributes?: Attributes): AsyncTreatments; + /** + * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. + * + * @param featureFlagNames - An array of the feature flag names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. + */ + getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; + /** + * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. + * + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. + */ + getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatments; + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. + * + * @param flagSet - The flag set name we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. 
+ */ + getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig; + /** + * Returns a Returns a Treatments value, which is an object with both treatment and config string for to the feature flags related to the given flag sets. + * + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns Treatments promise that resolves to the treatments object map. + */ + getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatments; + /** + * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. + * + * @param flagSets - An array of the flag set names we want to get the treatments. + * @param attributes - An object of type Attributes defining the attributes for the given key. + * @returns TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object. + */ + getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig; + /** + * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not). + * + * @param trafficType - The traffic type of the entity related to this event. + * @param eventType - The event type corresponding to this event. + * @param value - The value of this event. + * @param properties - The properties of this event. Values can be string, number, boolean or null. + * @returns A promise that resolves to a boolean indicating if the event was added to the queue successfully or not. + */ + track(trafficType: string, eventType: string, value?: number, properties?: Properties): Promise; + } + /** + * Representation of a manager instance with synchronous storage of the SDK. 
+   */
+  interface IManager extends IStatusInterface {
+    /**
+     * Get the array of feature flag names.
+     *
+     * @returns The list of feature flag names.
+     */
+    names(): SplitNames;
+    /**
+     * Get the array of feature flags data in SplitView format.
+     *
+     * @returns The list of SplitIO.SplitView objects.
+     */
+    splits(): SplitViews;
+    /**
+     * Get the data of a feature flag in SplitView format.
+     *
+     * @param featureFlagName - The name of the feature flag we want to get info of.
+     * @returns The SplitIO.SplitView of the given feature flag name, or null if the feature flag is not found.
+     */
+    split(featureFlagName: string): SplitView | null;
+  }
+  /**
+   * Representation of a manager instance with asynchronous storage of the SDK.
+   */
+  interface IAsyncManager extends IStatusInterface {
+    /**
+     * Get the array of feature flag names.
+     *
+     * @returns A promise that resolves to the list of feature flag names.
+     */
+    names(): SplitNamesAsync;
+    /**
+     * Get the array of feature flags data in SplitView format.
+     *
+     * @returns A promise that resolves to the SplitIO.SplitView list.
+     */
+    splits(): SplitViewsAsync;
+    /**
+     * Get the data of a feature flag in SplitView format.
+     *
+     * @param featureFlagName - The name of the feature flag we want to get info of.
+     * @returns A promise that resolves to the SplitIO.SplitView value.
+     */
+    split(featureFlagName: string): SplitViewAsync;
+  }
+}