From 9cb63925f15926f4235dbaf1ae5c23a5424df631 Mon Sep 17 00:00:00 2001 From: Josh Crowther Date: Tue, 27 Jun 2017 16:11:33 -0700 Subject: [PATCH 1/5] style(*): add prettier automation --- package.json | 11 +- yarn.lock | 1215 +++++++++++++++++++++++++++++--------------------- 2 files changed, 710 insertions(+), 516 deletions(-) diff --git a/package.json b/package.json index e368f80daa5..fbedd608d9c 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,8 @@ "build:package": "gulp build && cd dist/package && npm install --production && npm shrinkwrap && npm pack && shx mv *.tgz ../", "test": "gulp build && gulp build:tests && gulp test && gulp test:integration", "dev": "gulp dev", - "commitmsg": "validate-commit-msg" + "commitmsg": "validate-commit-msg", + "precommit": "lint-staged" }, "main": "index.js", "devDependencies": { @@ -69,8 +70,10 @@ "karma-mocha": "^1.3.0", "karma-spec-reporter": "^0.0.30", "karma-typescript": "^3.0.4", + "lint-staged": "^4.0.0", "merge2": "^1.0.3", "mkdirp": "^0.5.1", + "prettier": "^1.5.1", "require-dir": "^0.3.1", "rimraf": "^2.6.1", "shx": "^0.2.2", @@ -98,5 +101,11 @@ "cz-customizable": { "config": "./.cz-config.js" } + }, + "lint-staged": { + "**/*.ts": [ + "prettier --write {src,tests}/**/*.ts", + "git add" + ] } } diff --git a/yarn.lock b/yarn.lock index d163cbfe3e7..ac36e7d0d0a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -28,8 +28,8 @@ resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-2.2.41.tgz#e27cf0817153eb9f2713b2d3f6c68f1e1c3ca608" "@types/node@^7.0.8": - version "7.0.15" - resolved "https://registry.yarnpkg.com/@types/node/-/node-7.0.15.tgz#628429289604c5f7e56c13f3a0422f3e59df1a17" + version "7.0.39" + resolved "https://registry.yarnpkg.com/@types/node/-/node-7.0.39.tgz#8aced4196387038113f6f9aa4014ab4c51edab3c" "@types/sinon@^1.16.35": version "1.16.36" @@ -57,7 +57,11 @@ abab@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.3.tgz#b81de5f7274ec4e756d797cd834f303642724e5d" -abbrev@1, 
abbrev@1.0.x: +abbrev@1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f" + +abbrev@1.0.x: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" @@ -81,8 +85,8 @@ acorn-globals@^3.1.0: acorn "^4.0.4" acorn@4.X, acorn@^4.0.3, acorn@^4.0.4: - version "4.0.11" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.11.tgz#edcda3bd937e7556410d42ed5860f67399c794c0" + version "4.0.13" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787" acorn@^2.7.0: version "2.7.0" @@ -93,8 +97,8 @@ acorn@^3.0.0: resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" acorn@^5.0.0, acorn@^5.0.3: - version "5.0.3" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.0.3.tgz#c460df08491463f028ccb82eab3730bf01087b3d" + version "5.1.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.1.1.tgz#53fe161111f912ab999ee887a90a0bc52822fd75" after@0.8.2: version "0.8.2" @@ -112,11 +116,11 @@ ajv@^4.9.1: json-stable-stringify "^1.0.1" ajv@^5.1.5: - version "5.2.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.2.0.tgz#c1735024c5da2ef75cc190713073d44f098bf486" + version "5.2.2" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.2.2.tgz#47c68d69e86f5d953103b0074a9430dc63da5e39" dependencies: co "^4.6.0" - fast-deep-equal "^0.1.0" + fast-deep-equal "^1.0.0" json-schema-traverse "^0.3.0" json-stable-stringify "^1.0.1" @@ -128,11 +132,15 @@ align-text@^0.1.1, align-text@^0.1.3: longest "^1.0.1" repeat-string "^1.5.2" -amdefine@1.0.0, amdefine@>=0.0.4, amdefine@^1.0.0: +amdefine@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.0.tgz#fd17474700cb5cc9c2b709f0be9d23ce3c198c33" -ansi-escapes@^1.1.0: +amdefine@>=0.0.4, amdefine@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" + +ansi-escapes@^1.0.0, ansi-escapes@^1.1.0: version "1.4.0" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e" @@ -165,9 +173,13 @@ anymatch@^1.3.0: arrify "^1.0.0" micromatch "^2.1.5" +app-root-path@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/app-root-path/-/app-root-path-2.0.1.tgz#cd62dcf8e4fd5a417efc664d2e5b10653c651b46" + aproba@^1.0.3: - version "1.1.1" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.1.1.tgz#95d3600f07710aa0e9298c726ad5ecf2eacbabab" + version "1.1.2" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.1.2.tgz#45c6629094de4e96f693ef7eab74ae079c240fc1" archy@^1.0.0: version "1.0.0" @@ -202,11 +214,7 @@ arr-filter@^1.1.1: dependencies: make-iterator "^1.0.0" -arr-flatten@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.0.3.tgz#a274ed85ac08849b6bd7847c4580745dc51adfb1" - -arr-flatten@^1.0.3: +arr-flatten@^1.0.1, arr-flatten@^1.0.3: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" @@ -301,9 +309,9 @@ assert-plus@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234" -assert@^1.1.1, assert@~1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/assert/-/assert-1.3.0.tgz#03939a622582a812cc202320a0b9a56c9b815849" +assert@^1.1.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/assert/-/assert-1.4.1.tgz#99912d591836b5a6f5b345c0f07eefc08fc65d91" dependencies: util "0.10.3" @@ -313,6 +321,12 @@ assert@~1.1.0: dependencies: util "0.10.3" +assert@~1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/assert/-/assert-1.3.0.tgz#03939a622582a812cc202320a0b9a56c9b815849" + dependencies: + util "0.10.3" + 
assertion-error@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.0.2.tgz#13ca515d86206da0bac66e834dd397d87581094c" @@ -342,21 +356,17 @@ async-settle@^1.0.0: dependencies: async-done "^1.2.2" -async@1.x, async@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/async/-/async-1.0.0.tgz#f8fc04ca3a13784ade9e1641af98578cfbd647a9" +async@1.x, async@^1.3.0, async@^1.4.0: + version "1.5.2" + resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" async@^0.9.0: version "0.9.2" resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d" -async@^1.3.0, async@^1.4.0: - version "1.5.2" - resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" - async@^2.1.2, async@^2.1.4: - version "2.4.0" - resolved "https://registry.yarnpkg.com/async/-/async-2.4.0.tgz#4990200f18ea5b837c2cc4f8c031a6985c385611" + version "2.5.0" + resolved "https://registry.yarnpkg.com/async/-/async-2.5.0.tgz#843190fd6b7357a0b9e1c956edddd5ec8462b54d" dependencies: lodash "^4.14.0" @@ -364,6 +374,10 @@ async@~0.2.6: version "0.2.10" resolved "https://registry.yarnpkg.com/async/-/async-0.2.10.tgz#b6bbe0b0674b9d719708ca38de8c237cb526c3d1" +async@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/async/-/async-1.0.0.tgz#f8fc04ca3a13784ade9e1641af98578cfbd647a9" + asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" @@ -427,19 +441,19 @@ babel-code-frame@^6.22.0: js-tokens "^3.0.0" babel-core@^6.0.2, babel-core@^6.24.0, babel-core@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.24.1.tgz#8c428564dce1e1f41fb337ec34f4c3b022b5ad83" + version "6.25.0" + resolved 
"https://registry.yarnpkg.com/babel-core/-/babel-core-6.25.0.tgz#7dd42b0463c742e9d5296deb3ec67a9322dad729" dependencies: babel-code-frame "^6.22.0" - babel-generator "^6.24.1" + babel-generator "^6.25.0" babel-helpers "^6.24.1" babel-messages "^6.23.0" babel-register "^6.24.1" babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - babylon "^6.11.0" + babel-template "^6.25.0" + babel-traverse "^6.25.0" + babel-types "^6.25.0" + babylon "^6.17.2" convert-source-map "^1.1.0" debug "^2.1.1" json5 "^0.5.0" @@ -450,13 +464,13 @@ babel-core@^6.0.2, babel-core@^6.24.0, babel-core@^6.24.1: slash "^1.0.0" source-map "^0.5.0" -babel-generator@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.24.1.tgz#e715f486c58ded25649d888944d52aa07c5d9497" +babel-generator@^6.25.0: + version "6.25.0" + resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.25.0.tgz#33a1af70d5f2890aeb465a4a7793c1df6a9ea9fc" dependencies: babel-messages "^6.23.0" babel-runtime "^6.22.0" - babel-types "^6.24.1" + babel-types "^6.25.0" detect-indent "^4.0.0" jsesc "^1.3.0" lodash "^4.2.0" @@ -521,9 +535,9 @@ babel-helper-hoist-variables@^6.24.1: babel-runtime "^6.22.0" babel-types "^6.24.1" -babel-helper-mark-eval-scopes@^0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/babel-helper-mark-eval-scopes/-/babel-helper-mark-eval-scopes-0.0.3.tgz#902f75aeb537336edc35eb9f52b6f09db7785328" +babel-helper-mark-eval-scopes@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/babel-helper-mark-eval-scopes/-/babel-helper-mark-eval-scopes-0.1.1.tgz#4554345edf9f2549427bd2098e530253f8af2992" babel-helper-optimise-call-expression@^6.24.1: version "6.24.1" @@ -550,9 +564,9 @@ babel-helper-remap-async-to-generator@^6.24.1: babel-traverse "^6.24.1" babel-types "^6.24.1" -babel-helper-remove-or-void@^0.0.1: - version "0.0.1" - resolved 
"https://registry.yarnpkg.com/babel-helper-remove-or-void/-/babel-helper-remove-or-void-0.0.1.tgz#f602790e465acf2dfbe84fb3dd210c43a2dd7262" +babel-helper-remove-or-void@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/babel-helper-remove-or-void/-/babel-helper-remove-or-void-0.1.1.tgz#9d7e1856dc6fafcb41b283a416730dc1844f66d7" babel-helper-replace-supers@^6.24.1: version "6.24.1" @@ -589,15 +603,17 @@ babel-plugin-check-es2015-constants@^6.22.0: babel-runtime "^6.22.0" babel-plugin-inline-replace-variables@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/babel-plugin-inline-replace-variables/-/babel-plugin-inline-replace-variables-1.2.2.tgz#be3963d8c27d3abb35e20b53122aff13e2ce9a92" + version "1.3.1" + resolved "https://registry.yarnpkg.com/babel-plugin-inline-replace-variables/-/babel-plugin-inline-replace-variables-1.3.1.tgz#9fbb8dd43229c777695e14ea0d3d781f048fdc0f" + dependencies: + babylon "^6.17.0" babel-plugin-minify-dead-code-elimination@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-dead-code-elimination/-/babel-plugin-minify-dead-code-elimination-0.1.4.tgz#18b6ecfab77c29caca061d8210fa3495001e4fa1" + version "0.1.7" + resolved "https://registry.yarnpkg.com/babel-plugin-minify-dead-code-elimination/-/babel-plugin-minify-dead-code-elimination-0.1.7.tgz#774f536f347b98393a27baa717872968813c342c" dependencies: - babel-helper-mark-eval-scopes "^0.0.3" - babel-helper-remove-or-void "^0.0.1" + babel-helper-mark-eval-scopes "^0.1.1" + babel-helper-remove-or-void "^0.1.1" lodash.some "^4.6.0" babel-plugin-syntax-async-functions@^6.8.0: @@ -818,8 +834,8 @@ babel-polyfill@^6.23.0: regenerator-runtime "^0.10.0" babel-preset-env@^1.2.1: - version "1.4.0" - resolved "https://registry.yarnpkg.com/babel-preset-env/-/babel-preset-env-1.4.0.tgz#c8e02a3bcc7792f23cded68e0355b9d4c28f0f7a" + version "1.6.0" + resolved 
"https://registry.yarnpkg.com/babel-preset-env/-/babel-preset-env-1.6.0.tgz#2de1c782a780a0a5d605d199c957596da43c44e4" dependencies: babel-plugin-check-es2015-constants "^6.22.0" babel-plugin-syntax-trailing-function-commas "^6.22.0" @@ -848,8 +864,9 @@ babel-preset-env@^1.2.1: babel-plugin-transform-es2015-unicode-regex "^6.22.0" babel-plugin-transform-exponentiation-operator "^6.22.0" babel-plugin-transform-regenerator "^6.22.0" - browserslist "^1.4.0" + browserslist "^2.1.2" invariant "^2.2.2" + semver "^5.3.0" babel-register@^6.24.1: version "6.24.1" @@ -870,42 +887,42 @@ babel-runtime@^6.18.0, babel-runtime@^6.22.0: core-js "^2.4.0" regenerator-runtime "^0.10.0" -babel-template@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.24.1.tgz#04ae514f1f93b3a2537f2a0f60a5a45fb8308333" +babel-template@^6.24.1, babel-template@^6.25.0: + version "6.25.0" + resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.25.0.tgz#665241166b7c2aa4c619d71e192969552b10c071" dependencies: babel-runtime "^6.22.0" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - babylon "^6.11.0" + babel-traverse "^6.25.0" + babel-types "^6.25.0" + babylon "^6.17.2" lodash "^4.2.0" -babel-traverse@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.24.1.tgz#ab36673fd356f9a0948659e7b338d5feadb31695" +babel-traverse@^6.24.1, babel-traverse@^6.25.0: + version "6.25.0" + resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.25.0.tgz#2257497e2fcd19b89edc13c4c91381f9512496f1" dependencies: babel-code-frame "^6.22.0" babel-messages "^6.23.0" babel-runtime "^6.22.0" - babel-types "^6.24.1" - babylon "^6.15.0" + babel-types "^6.25.0" + babylon "^6.17.2" debug "^2.2.0" globals "^9.0.0" invariant "^2.2.0" lodash "^4.2.0" -babel-types@^6.19.0, babel-types@^6.24.1: - version "6.24.1" - resolved 
"https://registry.yarnpkg.com/babel-types/-/babel-types-6.24.1.tgz#a136879dc15b3606bda0d90c1fc74304c2ff0975" +babel-types@^6.19.0, babel-types@^6.24.1, babel-types@^6.25.0: + version "6.25.0" + resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.25.0.tgz#70afb248d5660e5d18f811d91c8303b54134a18e" dependencies: babel-runtime "^6.22.0" esutils "^2.0.2" lodash "^4.2.0" to-fast-properties "^1.0.1" -babylon@^6.11.0, babylon@^6.15.0: - version "6.17.0" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.17.0.tgz#37da948878488b9c4e3c4038893fa3314b3fc932" +babylon@^6.17.0, babylon@^6.17.2: + version "6.17.4" + resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.17.4.tgz#3e8b7402b88d22c3423e137a1577883b15ff869a" bach@^1.0.0: version "1.1.0" @@ -925,17 +942,17 @@ backo2@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" -balanced-match@^0.4.1: - version "0.4.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-0.4.2.tgz#cb3f3e3c732dc0f01ee70b403f302e61d7709838" +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" base64-arraybuffer@0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" base64-js@^1.0.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.0.tgz#a39992d723584811982be5e290bb6a53d86700f1" + version "1.2.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.1.tgz#a91947da1f4a516ea38e5b4ec0ec3773675e0886" base64-js@~0.0.4: version "0.0.8" @@ -984,8 +1001,8 @@ big.js@^3.1.3: resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.1.3.tgz#4cada2193652eb3ca9ec8e55c9015669c9806978" binary-extensions@^1.0.0: - version "1.8.0" - resolved 
"https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.8.0.tgz#48ec8d16df4377eae5fa5884682480af4d95c774" + version "1.9.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.9.0.tgz#66506c16ce6f4d6928a5b3cd6a33ca41e941e37b" binaryextensions@~1.0.0: version "1.0.1" @@ -1006,23 +1023,23 @@ bluebird@^3.3.0: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.0.tgz#791420d7f551eea2897453a8a77653f96606d67c" bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0: - version "4.11.6" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.6.tgz#53344adb14617a13f6e8dd2ce28905d1c0ba3215" + version "4.11.7" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.7.tgz#ddb048e50d9482790094c13eb3fcfc833ce7ab46" body-parser@^1.16.1: - version "1.17.1" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.17.1.tgz#75b3bc98ddd6e7e0d8ffe750dfaca5c66993fa47" + version "1.17.2" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.17.2.tgz#f8892abc8f9e627d42aedafbca66bf5ab99104ee" dependencies: bytes "2.4.0" content-type "~1.0.2" - debug "2.6.1" + debug "2.6.7" depd "~1.1.0" http-errors "~1.6.1" iconv-lite "0.4.15" on-finished "~2.3.0" qs "6.4.0" raw-body "~2.2.0" - type-is "~1.6.14" + type-is "~1.6.15" boom@2.x.x: version "2.10.1" @@ -1030,11 +1047,11 @@ boom@2.x.x: dependencies: hoek "2.x.x" -brace-expansion@^1.0.0: - version "1.1.7" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.7.tgz#3effc3c50e000531fb720eaff80f0ae8ef23cf59" +brace-expansion@^1.1.7: + version "1.1.8" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292" dependencies: - balanced-match "^0.4.1" + balanced-match "^1.0.0" concat-map "0.0.1" braces@^0.1.2: @@ -1210,21 +1227,17 @@ browserify@3.x: vm-browserify "~0.0.1" xtend "^3.0.0" -browserslist@^1.4.0: - version "1.7.7" - resolved 
"https://registry.yarnpkg.com/browserslist/-/browserslist-1.7.7.tgz#0bd76704258be829b2398bb50e4b62d1a166b0b9" +browserslist@^2.1.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-2.2.2.tgz#e9b4618b8a01c193f9786beea09f6fd10dbe31c3" dependencies: - caniuse-db "^1.0.30000639" - electron-to-chromium "^1.2.7" + caniuse-lite "^1.0.30000704" + electron-to-chromium "^1.3.16" buffer-equal-constant-time@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" -buffer-shims@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/buffer-shims/-/buffer-shims-1.0.0.tgz#9978ce317388c649ad8793028c3477ef044a8b51" - buffer-xor@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" @@ -1298,9 +1311,9 @@ camelcase@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a" -caniuse-db@^1.0.30000639: - version "1.0.30000665" - resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000665.tgz#e84f4277935f295f546f8533cb0b410a8415b972" +caniuse-lite@^1.0.30000704: + version "1.0.30000704" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000704.tgz#adb6ea01134515663682db93abab291d4c02946b" caseless@~0.12.0: version "0.12.0" @@ -1357,9 +1370,9 @@ child-process-promise@^2.2.1: node-version "^1.0.0" promise-polyfill "^6.0.1" -chokidar@^1.0.0, chokidar@^1.4.1, chokidar@^1.4.3, chokidar@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.6.1.tgz#2f4447ab5e96e50fb3d789fd90d4c72e0e4c70c2" +chokidar@^1.0.0, chokidar@^1.4.1, chokidar@^1.4.3, chokidar@^1.6.1, chokidar@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468" 
dependencies: anymatch "^1.3.0" async-each "^1.0.0" @@ -1376,11 +1389,12 @@ ci-info@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.0.0.tgz#dc5285f2b4e251821683681c381c3388f46ec534" -cipher-base@^1.0.0, cipher-base@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.3.tgz#eeabf194419ce900da3018c207d212f2a6df0a07" +cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" dependencies: inherits "^2.0.1" + safe-buffer "^5.0.1" class-utils@^0.3.4: version "0.3.5" @@ -1392,12 +1406,23 @@ class-utils@^0.3.4: lazy-cache "^2.0.2" static-extend "^0.1.1" -cli-cursor@^1.0.1: +cli-cursor@^1.0.1, cli-cursor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-1.0.2.tgz#64da3f7d56a54412e59794bd62dc35295e8f2987" dependencies: restore-cursor "^1.0.1" +cli-spinners@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-0.1.2.tgz#bb764d88e185fb9e1e6a2a1f19772318f605e31c" + +cli-truncate@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-0.2.1.tgz#9f15cfbb0705005369216c626ac7d05ab90dd574" + dependencies: + slice-ansi "0.0.4" + string-width "^1.0.1" + cli-width@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.1.0.tgz#b234ca209b29ef66fc518d9b98d5847b00edf00a" @@ -1434,6 +1459,10 @@ clone@^1.0.0, clone@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.2.tgz#260b7a99ebb1edfe247538175f783243cb19d149" +clone@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.1.tgz#d217d1e961118e3ac9a4b8bba3285553bf647cdb" + cloneable-readable@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/cloneable-readable/-/cloneable-readable-1.0.0.tgz#a6290d413f217a61232f95e458ff38418cfb0117" @@ -1509,12 +1538,16 @@ combined-stream@^1.0.5, combined-stream@~1.0.5: dependencies: delayed-stream "~1.0.0" -commander@2.9.0, commander@^2.8.1: +commander@2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.9.0.tgz#9c99094176e12240cb22d6c5146098400fe0f7d4" dependencies: graceful-readlink ">= 1.0.0" +commander@^2.8.1, commander@^2.9.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.11.0.tgz#157152fd1e7a6c8d98a5b715cf376df928004563" + commander@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.1.0.tgz#d121bbae860d9992a3d517ba96f56588e47c6781" @@ -1543,7 +1576,15 @@ concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" -concat-stream@^1.4.7, concat-stream@~1.4.1, concat-stream@~1.4.5: +concat-stream@^1.4.7: + version "1.6.0" + resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.0.tgz#0aac662fd52be78964d5532f694784e70110acf7" + dependencies: + inherits "^2.0.3" + readable-stream "^2.2.2" + typedarray "^0.0.6" + +concat-stream@~1.4.1, concat-stream@~1.4.5: version "1.4.10" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.4.10.tgz#acc3bbf5602cb8cc980c6ac840fa7d8603e3ef36" dependencies: @@ -1558,11 +1599,11 @@ concat-with-sourcemaps@*, concat-with-sourcemaps@^1.0.0: source-map "^0.5.1" connect@^3.6.0: - version "3.6.1" - resolved "https://registry.yarnpkg.com/connect/-/connect-3.6.1.tgz#b7760693a74f0454face1d9378edb3f885b43227" + version "3.6.2" + resolved "https://registry.yarnpkg.com/connect/-/connect-3.6.2.tgz#694e8d20681bfe490282c8ab886be98f09f42fe7" dependencies: - debug "2.6.3" - finalhandler "1.0.1" + debug "2.6.7" + finalhandler "1.0.3" parseurl "~1.3.1" utils-merge "1.0.0" @@ -1597,8 +1638,8 @@ 
content-type@~1.0.2: resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.2.tgz#b7d113aee7a8dd27bd21133c4dc2529df1721eed" conventional-commit-types@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/conventional-commit-types/-/conventional-commit-types-2.1.0.tgz#45d860386c9a2e6537ee91d8a1b61bd0411b3d04" + version "2.2.0" + resolved "https://registry.yarnpkg.com/conventional-commit-types/-/conventional-commit-types-2.2.0.tgz#5db95739d6c212acbe7b6f656a11b940baa68946" convert-source-map@1.X, convert-source-map@^1.1.0, convert-source-map@^1.1.1: version "1.5.0" @@ -1631,6 +1672,19 @@ core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" +cosmiconfig@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-1.1.0.tgz#0dea0f9804efdfb929fbb1b188e25553ea053d37" + dependencies: + graceful-fs "^4.1.2" + js-yaml "^3.4.3" + minimist "^1.2.0" + object-assign "^4.0.1" + os-homedir "^1.0.1" + parse-json "^2.2.0" + pinkie-promise "^2.0.0" + require-from-string "^1.1.0" + create-ecdh@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.0.tgz#888c723596cdf7612f6498233eebd7a35301737d" @@ -1638,21 +1692,25 @@ create-ecdh@^4.0.0: bn.js "^4.1.0" elliptic "^6.0.0" -create-hash@^1.1.0, create-hash@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.1.2.tgz#51210062d7bb7479f6c65bb41a92208b1d61abad" +create-hash@^1.1.0, create-hash@^1.1.1, create-hash@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.1.3.tgz#606042ac8b9262750f483caddab0f5819172d8fd" dependencies: cipher-base "^1.0.1" inherits "^2.0.1" - ripemd160 "^1.0.0" - sha.js "^2.3.6" + ripemd160 "^2.0.0" + sha.js "^2.4.0" -create-hmac@^1.1.0, create-hmac@^1.1.2: - version "1.1.4" - resolved 
"https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.4.tgz#d3fb4ba253eb8b3f56e39ea2fbcb8af747bd3170" +create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: + version "1.1.6" + resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.6.tgz#acb9e221a4e17bdb076e90657c42b93e3726cf06" dependencies: + cipher-base "^1.0.3" create-hash "^1.1.0" inherits "^2.0.1" + ripemd160 "^2.0.0" + safe-buffer "^5.0.1" + sha.js "^2.4.8" cross-env@^5.0.1: version "5.0.1" @@ -1692,8 +1750,8 @@ crypto-browserify@3.3.0: sha.js "2.2.6" crypto-browserify@^3.0.0, crypto-browserify@^3.11.0: - version "3.11.0" - resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.11.0.tgz#3652a0906ab9b2a7e0c3ce66a408e957a2485522" + version "3.11.1" + resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.11.1.tgz#948945efc6757a400d6e5e5af47194d10064279f" dependencies: browserify-cipher "^1.0.0" browserify-sign "^4.0.0" @@ -1770,6 +1828,10 @@ dashdash@^1.12.0: dependencies: assert-plus "^1.0.0" +date-fns@^1.27.2: + version "1.28.5" + resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-1.28.5.tgz#257cfc45d322df45ef5658665967ee841cd73faf" + date-format@^0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/date-format/-/date-format-0.0.0.tgz#09206863ab070eb459acea5542cbd856b11966b3" @@ -1790,10 +1852,11 @@ dateformat@^2.0.0: resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-2.0.0.tgz#2743e3abb5c3fc2462e527dca445e04e9f4dee17" debug-fabulous@0.1.X: - version "0.1.0" - resolved "https://registry.yarnpkg.com/debug-fabulous/-/debug-fabulous-0.1.0.tgz#ad0ea07a5d519324fb55842a8f34ee59c7f8ff6c" + version "0.1.1" + resolved "https://registry.yarnpkg.com/debug-fabulous/-/debug-fabulous-0.1.1.tgz#1b970878c9fa4fbd1c88306eab323c830c58f1d6" dependencies: - debug "2.X" + debug "2.3.0" + memoizee "^0.4.5" object-assign "4.1.0" debug@2.2.0: @@ -1802,6 +1865,12 @@ debug@2.2.0: dependencies: ms "0.7.1" +debug@2.3.0: + version 
"2.3.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.3.0.tgz#3912dc55d7167fc3af17d2b85c13f93deaedaa43" + dependencies: + ms "0.7.2" + debug@2.3.3: version "2.3.3" resolved "https://registry.yarnpkg.com/debug/-/debug-2.3.3.tgz#40c453e67e6e13c901ddec317af8986cda9eff8c" @@ -1814,29 +1883,17 @@ debug@2.6.0: dependencies: ms "0.7.2" -debug@2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.1.tgz#79855090ba2c4e3115cc7d8769491d58f0491351" - dependencies: - ms "0.7.2" - -debug@2.6.3: - version "2.6.3" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.3.tgz#0f7eb8c30965ec08c72accfa0130c8b79984141d" - dependencies: - ms "0.7.2" - -debug@2.X, debug@^2.1.1, debug@^2.2.0: - version "2.6.6" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.6.tgz#a9fa6fbe9ca43cf1e79f73b75c0189cbb7d6db5a" +debug@2.6.7: + version "2.6.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.7.tgz#92bad1f6d05bbb6bba22cca88bcd0ec894c2861e" dependencies: - ms "0.7.3" + ms "2.0.0" debug@^0.7.2: version "0.7.4" resolved "https://registry.yarnpkg.com/debug/-/debug-0.7.4.tgz#06e1ea8082c2cb14e39806e22e2f6f757f92af39" -debug@^2.3.3: +debug@^2.1.1, debug@^2.2.0, debug@^2.3.3: version "2.6.8" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.8.tgz#e731531ca2ede27d188222427da17821d68ff4fc" dependencies: @@ -1846,11 +1903,11 @@ decamelize@^1.0.0, decamelize@^1.1.1, decamelize@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" -decomment@^0.8.7: - version "0.8.7" - resolved "https://registry.yarnpkg.com/decomment/-/decomment-0.8.7.tgz#248f2116f6ce3a8e8dd1b63e5b35aba5ca054ca5" +decomment@^0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/decomment/-/decomment-0.9.0.tgz#00d1ae9130c6018a99323bd1612f29fcf375c3a2" dependencies: - esprima "3.1" + esprima "^4.0.0" deep-eql@^0.1.3: version "0.1.3" @@ -1863,10 +1920,10 @@ deep-equal@~0.1.0: 
resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-0.1.2.tgz#b246c2b80a570a47c11be1d9bd1070ec878b87ce" deep-extend@~0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.1.tgz#efe4113d08085f4e6f9687759810f807469e2253" + version "0.4.2" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" -deep-is@~0.1.2, deep-is@~0.1.3: +deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" @@ -1952,10 +2009,14 @@ di@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" -diff@3.2.0, diff@^3.1.0, diff@^3.2.0: +diff@3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/diff/-/diff-3.2.0.tgz#c9ce393a4b7cbd0b058a725c93df299027868ff9" +diff@^3.1.0, diff@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-3.3.0.tgz#056695150d7aa93237ca7e378ac3b1682b7963b9" + diffie-hellman@^5.0.0: version "5.0.2" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.2.tgz#b5835739270cfe26acf632099fded2a07f209e5e" @@ -2028,9 +2089,13 @@ ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" -electron-to-chromium@^1.2.7: - version "1.3.9" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.9.tgz#db1cba2a26aebcca2f7f5b8b034554468609157d" +electron-to-chromium@^1.3.16: + version "1.3.16" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.16.tgz#d0e026735754770901ae301a21664cba45d92f7d" + +elegant-spinner@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/elegant-spinner/-/elegant-spinner-1.0.1.tgz#db043521c95d7e303fd8f345bedc3349cfb0729e" elliptic@^6.0.0: version "6.4.0" @@ -2103,23 +2168,14 @@ 
engine.io@1.8.3: engine.io-parser "1.3.2" ws "1.1.2" -enhanced-resolve@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.1.0.tgz#9f4b626f577245edcf4b2ad83d86e17f4f421dec" - dependencies: - graceful-fs "^4.1.2" - memory-fs "^0.4.0" - object-assign "^4.0.1" - tapable "^0.2.5" - -enhanced-resolve@^3.1.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.3.0.tgz#950964ecc7f0332a42321b673b38dc8ff15535b3" +enhanced-resolve@^3.1.0, enhanced-resolve@^3.3.0: + version "3.4.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz#0421e339fd71419b3da13d129b3979040230476e" dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" object-assign "^4.0.1" - tapable "^0.2.5" + tapable "^0.2.7" enhanced-resolve@~0.9.0: version "0.9.1" @@ -2145,9 +2201,9 @@ error-ex@^1.2.0: dependencies: is-arrayish "^0.2.1" -es5-ext@^0.10.14, es5-ext@^0.10.9, es5-ext@~0.10.14: - version "0.10.15" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.15.tgz#c330a5934c1ee21284a7c081a86e5fd937c91ea6" +es5-ext@^0.10.13, es5-ext@^0.10.14, es5-ext@^0.10.9, es5-ext@~0.10.14, es5-ext@~0.10.2: + version "0.10.24" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.24.tgz#a55877c9924bc0c8d9bd3c2cbe17495ac1709b14" dependencies: es6-iterator "2" es6-symbol "~3.1" @@ -2176,8 +2232,8 @@ es6-object-assign@^1.0.3: resolved "https://registry.yarnpkg.com/es6-object-assign/-/es6-object-assign-1.1.0.tgz#c2c3582656247c39ea107cb1e6652b6f9f24523c" es6-promise@^4.0.5: - version "4.1.0" - resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.1.0.tgz#dda03ca8f9f89bc597e689842929de7ba8cebdf0" + version "4.1.1" + resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.1.1.tgz#8811e90915d9a0dba36274f0b242dbda78f9c92a" es6-set@~0.1.5: version "0.1.5" @@ -2213,17 +2269,6 @@ escape-string-regexp@1.0.5, escape-string-regexp@^1.0.0, escape-string-regexp@^1 
version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" -escodegen@1.7.x: - version "1.7.1" - resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.7.1.tgz#30ecfcf66ca98dc67cd2fd162abeb6eafa8ce6fc" - dependencies: - esprima "^1.2.2" - estraverse "^1.9.1" - esutils "^2.0.2" - optionator "^0.5.0" - optionalDependencies: - source-map "~0.2.0" - escodegen@1.8.x, escodegen@^1.6.1: version "1.8.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" @@ -2264,21 +2309,13 @@ esprima-fb@3001.1.0-dev-harmony-fb, esprima-fb@^3001.1.0-dev-harmony-fb: version "3001.1.0-dev-harmony-fb" resolved "https://registry.yarnpkg.com/esprima-fb/-/esprima-fb-3001.0001.0000-dev-harmony-fb.tgz#b77d37abcd38ea0b77426bb8bc2922ce6b426411" -esprima@2.5.x: - version "2.5.0" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.5.0.tgz#f387a46fd344c1b1a39baf8c20bfb43b6d0058cc" - esprima@2.7.x, esprima@^2.7.1: version "2.7.3" resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" -esprima@3.1, esprima@^3.1.1: - version "3.1.3" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633" - -esprima@^1.2.2: - version "1.2.5" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-1.2.5.tgz#0993502feaf668138325756f30f9a51feeec11e9" +esprima@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" esprima@~1.0.2, esprima@~1.0.4: version "1.0.4" @@ -2323,7 +2360,7 @@ esutils@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/esutils/-/esutils-1.0.0.tgz#8151d358e20c8acc7fb745e7472c0025fe496570" -event-emitter@~0.3.5: +event-emitter@^0.3.4, event-emitter@~0.3.5: version "0.3.5" resolved 
"https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" dependencies: @@ -2360,6 +2397,18 @@ execa@^0.6.0: signal-exit "^3.0.0" strip-eof "^1.0.0" +execa@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" + dependencies: + cross-spawn "^5.0.1" + get-stream "^3.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + exit-hook@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/exit-hook/-/exit-hook-1.1.1.tgz#f05ca233b48c05d54fff07765df8507e95c02ff8" @@ -2403,12 +2452,18 @@ expand-range@^1.8.1: dependencies: fill-range "^2.1.0" -expand-tilde@^1.2.1, expand-tilde@^1.2.2: +expand-tilde@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-1.2.2.tgz#0b81eba897e5a3d31d1c3d102f8f01441e559449" dependencies: os-homedir "^1.0.1" +expand-tilde@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" + dependencies: + homedir-polyfill "^1.0.1" + extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" @@ -2461,13 +2516,9 @@ fancy-log@^1.1.0: chalk "^1.1.1" time-stamp "^1.0.0" -fast-deep-equal@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-0.1.0.tgz#5c6f4599aba6b333ee3342e2ed978672f1001f8d" - -fast-levenshtein@~1.0.0: - version "1.0.7" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-1.0.7.tgz#0178dcdee023b92905193af0959e8a7639cfdcb9" +fast-deep-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz#96256a3bc975595eb36d82e9929d060d893439ff" fast-levenshtein@~2.0.4: version "2.0.6" @@ -2479,7 +2530,7 @@ 
faye-websocket@0.9.3: dependencies: websocket-driver ">=0.5.1" -figures@^1.3.5: +figures@^1.3.5, figures@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" dependencies: @@ -2490,16 +2541,9 @@ filename-regex@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26" -fileset@0.2.x: - version "0.2.1" - resolved "https://registry.yarnpkg.com/fileset/-/fileset-0.2.1.tgz#588ef8973c6623b2a76df465105696b96aac8067" - dependencies: - glob "5.x" - minimatch "2.x" - filesize@^3.5.6: - version "3.5.9" - resolved "https://registry.yarnpkg.com/filesize/-/filesize-3.5.9.tgz#9e3dd8a9b124f5b2f1fb2ee9cd13a86c707bb222" + version "3.5.10" + resolved "https://registry.yarnpkg.com/filesize/-/filesize-3.5.10.tgz#fc8fa23ddb4ef9e5e0ab6e1e64f679a24a56761f" fill-range@^2.1.0: version "2.2.3" @@ -2520,11 +2564,11 @@ fill-range@^4.0.0: repeat-string "^1.6.1" to-regex-range "^2.1.0" -finalhandler@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.1.tgz#bcd15d1689c0e5ed729b6f7f541a6df984117db8" +finalhandler@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.3.tgz#ef47e77950e999780e86022a560e3217e0d0cc89" dependencies: - debug "2.6.3" + debug "2.6.7" encodeurl "~1.0.1" escape-html "~1.0.3" on-finished "~2.3.0" @@ -2572,15 +2616,13 @@ findup@0.1.5: commander "~2.1.0" fined@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/fined/-/fined-1.0.2.tgz#5b28424b760d7598960b7ef8480dff8ad3660e97" + version "1.1.0" + resolved "https://registry.yarnpkg.com/fined/-/fined-1.1.0.tgz#b37dc844b76a2f5e7081e884f7c0ae344f153476" dependencies: - expand-tilde "^1.2.1" - lodash.assignwith "^4.0.7" - lodash.isempty "^4.2.1" - lodash.isplainobject "^4.0.4" - lodash.isstring "^4.0.1" - lodash.pick "^4.2.1" + expand-tilde "^2.0.2" + 
is-plain-object "^2.0.3" + object.defaults "^1.1.0" + object.pick "^1.2.0" parse-filepath "^1.0.1" first-chunk-stream@^1.0.0: @@ -2654,11 +2696,11 @@ fs.realpath@^1.0.0: resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" fsevents@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.1.tgz#f19fd28f43eeaf761680e519a203c4d0b3d31aff" + version "1.1.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.2.tgz#3282b713fb3ad80ede0e9fcf4611b5aa6fc033f4" dependencies: nan "^2.3.0" - node-pre-gyp "^0.6.29" + node-pre-gyp "^0.6.36" fstream-ignore@^1.0.5: version "1.0.5" @@ -2677,7 +2719,7 @@ fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2: mkdirp ">=0.5 0" rimraf "2" -gauge@~2.7.1: +gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" dependencies: @@ -2768,7 +2810,18 @@ glob-watcher@^3.0.0: lodash.debounce "^4.0.6" object.defaults "^1.0.0" -glob@5.x, glob@^5.0.15, glob@^5.0.3, glob@~5.0.0: +glob@7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8" + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.2" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^5.0.15, glob@^5.0.3, glob@~5.0.0: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" dependencies: @@ -2778,14 +2831,14 @@ glob@5.x, glob@^5.0.15, glob@^5.0.3, glob@~5.0.0: once "^1.3.0" path-is-absolute "^1.0.0" -glob@7.1.1, glob@^7.0.0, glob@^7.0.5, glob@^7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8" +glob@^7.0.0, glob@^7.0.5, glob@^7.1.1: + version "7.1.2" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" 
dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" - minimatch "^3.0.2" + minimatch "^3.0.4" once "^1.3.0" path-is-absolute "^1.0.0" @@ -2813,8 +2866,8 @@ global-prefix@^0.1.4: which "^1.2.12" globals@^9.0.0: - version "9.17.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-9.17.0.tgz#0c0ca696d9b9bb694d2e5470bd37777caad50286" + version "9.18.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" glogg@^1.0.0: version "1.0.0" @@ -2916,8 +2969,8 @@ gulp-file@^0.3.0: through2 "^0.4.1" gulp-header@^1.8.8: - version "1.8.8" - resolved "https://registry.yarnpkg.com/gulp-header/-/gulp-header-1.8.8.tgz#4509c64677aab56b5ee8e4669a79b1655933a49e" + version "1.8.9" + resolved "https://registry.yarnpkg.com/gulp-header/-/gulp-header-1.8.9.tgz#c9f10fee0632d81e939789c6ecf45a151bf3098b" dependencies: concat-with-sourcemaps "*" gulp-util "*" @@ -2933,12 +2986,12 @@ gulp-if@^2.0.2: through2 "^2.0.1" gulp-istanbul@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/gulp-istanbul/-/gulp-istanbul-1.1.1.tgz#e094d98f42bfa4d9a8e5366f414ed9a09a3c6537" + version "1.1.2" + resolved "https://registry.yarnpkg.com/gulp-istanbul/-/gulp-istanbul-1.1.2.tgz#af65fa28bfdb3576daab95dcfaa732a6a27c5a07" dependencies: gulp-util "^3.0.1" istanbul "^0.4.0" - istanbul-threshold-checker "^0.1.0" + istanbul-threshold-checker "^0.2.1" lodash "^4.0.0" through2 "^2.0.0" vinyl-sourcemaps-apply "^0.2.1" @@ -3000,31 +3053,31 @@ gulp-sourcemaps@^2.4.1: vinyl "1.X" gulp-strip-comments@^2.4.5: - version "2.4.5" - resolved "https://registry.yarnpkg.com/gulp-strip-comments/-/gulp-strip-comments-2.4.5.tgz#76a5587078e6a03332925a452d2b744c1bb6cda5" + version "2.5.1" + resolved "https://registry.yarnpkg.com/gulp-strip-comments/-/gulp-strip-comments-2.5.1.tgz#d4a3e3c23c79f6eb05f475a3602596c9e4da104e" dependencies: - decomment "^0.8.7" + decomment "^0.9.0" gulp-util "^3.0.8" - through2 "^2.0.1" + through2 "^2.0.3" 
gulp-typescript@^3.1.6: - version "3.1.6" - resolved "https://registry.yarnpkg.com/gulp-typescript/-/gulp-typescript-3.1.6.tgz#6c67b84364cf3589a9ad6fdea2e3c0bc525c435e" + version "3.2.1" + resolved "https://registry.yarnpkg.com/gulp-typescript/-/gulp-typescript-3.2.1.tgz#52cd77e9c6844e3b9a8bddd88e884ceb46a5db79" dependencies: gulp-util "~3.0.7" source-map "~0.5.3" through2 "~2.0.1" vinyl-fs "~2.4.3" -gulp-util@*, gulp-util@3.0.7, gulp-util@^3.0.0, gulp-util@^3.0.1: - version "3.0.7" - resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-3.0.7.tgz#78925c4b8f8b49005ac01a011c557e6218941cbb" +gulp-util@*, gulp-util@^3.0.0, gulp-util@^3.0.1, gulp-util@^3.0.7, gulp-util@^3.0.8, gulp-util@~3.0.7: + version "3.0.8" + resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-3.0.8.tgz#0054e1e744502e27c04c187c3ecc505dd54bbb4f" dependencies: array-differ "^1.0.0" array-uniq "^1.0.2" beeper "^1.0.0" chalk "^1.0.0" - dateformat "^1.0.11" + dateformat "^2.0.0" fancy-log "^1.1.0" gulplog "^1.0.0" has-gulplog "^0.1.0" @@ -3039,28 +3092,15 @@ gulp-util@*, gulp-util@3.0.7, gulp-util@^3.0.0, gulp-util@^3.0.1: through2 "^2.0.0" vinyl "^0.5.0" -gulp-util@^2.2.14, gulp-util@~2.2.14, gulp-util@~2.2.5: - version "2.2.20" - resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-2.2.20.tgz#d7146e5728910bd8f047a6b0b1e549bc22dbd64c" - dependencies: - chalk "^0.5.0" - dateformat "^1.0.7-1.2.3" - lodash._reinterpolate "^2.4.1" - lodash.template "^2.4.1" - minimist "^0.2.0" - multipipe "^0.1.0" - through2 "^0.5.0" - vinyl "^0.2.1" - -gulp-util@^3.0.7, gulp-util@^3.0.8, gulp-util@~3.0.7: - version "3.0.8" - resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-3.0.8.tgz#0054e1e744502e27c04c187c3ecc505dd54bbb4f" +gulp-util@3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-3.0.7.tgz#78925c4b8f8b49005ac01a011c557e6218941cbb" dependencies: array-differ "^1.0.0" array-uniq "^1.0.2" beeper "^1.0.0" chalk "^1.0.0" - dateformat "^2.0.0" + 
dateformat "^1.0.11" fancy-log "^1.1.0" gulplog "^1.0.0" has-gulplog "^0.1.0" @@ -3075,9 +3115,22 @@ gulp-util@^3.0.7, gulp-util@^3.0.8, gulp-util@~3.0.7: through2 "^2.0.0" vinyl "^0.5.0" +gulp-util@^2.2.14, gulp-util@~2.2.14, gulp-util@~2.2.5: + version "2.2.20" + resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-2.2.20.tgz#d7146e5728910bd8f047a6b0b1e549bc22dbd64c" + dependencies: + chalk "^0.5.0" + dateformat "^1.0.7-1.2.3" + lodash._reinterpolate "^2.4.1" + lodash.template "^2.4.1" + minimist "^0.2.0" + multipipe "^0.1.0" + through2 "^0.5.0" + vinyl "^0.2.1" + gulp@gulpjs/gulp#4.0: version "4.0.0-alpha.2" - resolved "https://codeload.github.com/gulpjs/gulp/tar.gz/3f5aba28718dc19e4bf600fb88f128158ff2ff83" + resolved "https://codeload.github.com/gulpjs/gulp/tar.gz/6d71a658c61edb3090221579d8f97dbe086ba2ed" dependencies: glob-watcher "^3.0.0" gulp-cli "^1.0.0" @@ -3097,8 +3150,8 @@ gzip-size@^3.0.0: duplexer "^0.1.1" handlebars@^4.0.1: - version "4.0.8" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.8.tgz#22b875cd3f0e6cbea30314f144e82bc7a72ff420" + version "4.0.10" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.10.tgz#3d30c718b09a3d96f23ea4cc1f403c4d3ba9ff4f" dependencies: async "^1.4.0" optimist "^0.6.1" @@ -3169,12 +3222,19 @@ has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" -hash.js@^1.0.0, hash.js@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.0.3.tgz#1332ff00156c0a0ffdd8236013d07b77a0451573" +hash-base@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-2.0.2.tgz#66ea1d856db4e8a5470cadf6fce23ae5244ef2e1" dependencies: inherits "^2.0.1" +hash.js@^1.0.0, hash.js@^1.0.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.3.tgz#340dedbe6290187151c1ea1d777a3448935df846" + dependencies: + inherits "^2.0.3" + 
minimalistic-assert "^1.0.0" + hawk@~3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4" @@ -3203,15 +3263,15 @@ home-or-tmp@^2.0.0: os-homedir "^1.0.0" os-tmpdir "^1.0.1" -homedir-polyfill@^1.0.0: +homedir-polyfill@^1.0.0, homedir-polyfill@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.1.tgz#4c2bbc8a758998feebf5ed68580f76d46768b4bc" dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: - version "2.4.2" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.4.2.tgz#0076b9f46a270506ddbaaea56496897460612a67" + version "2.5.0" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c" html-encoding-sniffer@^1.0.1: version "1.0.1" @@ -3255,8 +3315,8 @@ https-browserify@0.0.1, https-browserify@~0.0.0: resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-0.0.1.tgz#3f91365cabe60b77ed0ebba24b454e3e09d95a82" husky@^0.13.3: - version "0.13.3" - resolved "https://registry.yarnpkg.com/husky/-/husky-0.13.3.tgz#bc2066080badc8b8fe3516e881f5bc68a57052ff" + version "0.13.4" + resolved "https://registry.yarnpkg.com/husky/-/husky-0.13.4.tgz#48785c5028de3452a51c48c12c4f94b2124a1407" dependencies: chalk "^1.1.3" find-parent-dir "^0.3.0" @@ -3281,6 +3341,10 @@ indent-string@^2.1.0: dependencies: repeating "^2.0.0" +indent-string@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" + indexof@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" @@ -3292,7 +3356,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@~2.0.0, inherits@~2.0.1: +inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, 
inherits@~2.0.1, inherits@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" @@ -3404,26 +3468,24 @@ is-data-descriptor@^0.1.4: kind-of "^3.0.2" is-descriptor@^0.1.0: - version "0.1.5" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.5.tgz#e3fb8b4ab65f3a37373388e18b401d78c58cbea7" + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" - kind-of "^3.0.2" - lazy-cache "^2.0.2" + kind-of "^5.0.0" is-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.0.tgz#d6ec686f238f6b02f23757abe12cf6b2ea2790f9" + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.1.tgz#2c6023599bde2de9d5d2c8b9a9d94082036b6ef2" dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" - kind-of "^3.0.2" - lazy-cache "^2.0.2" + kind-of "^5.0.0" is-dotfile@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.2.tgz#2c132383f39199f8edc268ca01b9b007d205cc4d" + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1" is-equal-shallow@^0.1.3: version "0.1.3" @@ -3471,7 +3533,7 @@ is-number@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-number/-/is-number-0.1.1.tgz#69a7af116963d47206ec9bd9b48a14216f1e3806" -is-number@^2.0.2, is-number@^2.1.0: +is-number@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" dependencies: @@ -3489,11 +3551,11 @@ is-odd@^1.0.0: dependencies: is-number "^3.0.0" -is-plain-object@^2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.1.tgz#4d7ca539bc9db9b737b8acb612f2318ef92f294f" +is-plain-object@^2.0.1, is-plain-object@^2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" dependencies: - isobject "^1.0.0" + isobject "^3.0.1" is-posix-bracket@^0.1.0: version "0.1.1" @@ -3503,7 +3565,7 @@ is-primitive@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" -is-promise@^2.1.0: +is-promise@^2.1, is-promise@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" @@ -3573,41 +3635,22 @@ isobject@^2.0.0, isobject@^2.1.0: dependencies: isarray "1.0.0" -isobject@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.0.tgz#39565217f3661789e8a0a0c080d5f7e6bc46e1a0" +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" isstream@0.1.x, isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" -istanbul-threshold-checker@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/istanbul-threshold-checker/-/istanbul-threshold-checker-0.1.0.tgz#0e1442c017cb27a85f781734fefd2126405ca39c" - dependencies: - istanbul "0.3.*" - lodash "3.6.*" - -istanbul@0.3.*: - version "0.3.22" - resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.3.22.tgz#3e164d85021fe19c985d1f0e7ef0c3e22d012eb6" +istanbul-threshold-checker@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/istanbul-threshold-checker/-/istanbul-threshold-checker-0.2.1.tgz#c5dc94e8f2cc5cd3ffd335452f84b553c4248331" dependencies: - abbrev "1.0.x" - async "1.x" - 
escodegen "1.7.x" - esprima "2.5.x" - fileset "0.2.x" - handlebars "^4.0.1" - js-yaml "3.x" - mkdirp "0.5.x" - nopt "3.x" - once "1.x" - resolve "1.1.x" - supports-color "^3.1.0" - which "^1.1.1" - wordwrap "^1.0.0" + istanbul "~0.4.5" + lodash "~4.17.2" -istanbul@0.4.5, istanbul@^0.4.0: +istanbul@0.4.5, istanbul@^0.4.0, istanbul@~0.4.5: version "0.4.5" resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" dependencies: @@ -3633,12 +3676,6 @@ istextorbinary@1.0.2: binaryextensions "~1.0.0" textextensions "~1.0.0" -jodid25519@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/jodid25519/-/jodid25519-1.0.2.tgz#06d4912255093419477d425633606e0e90782967" - dependencies: - jsbn "~0.1.0" - joi@^6.10.1: version "6.10.1" resolved "https://registry.yarnpkg.com/joi/-/joi-6.10.1.tgz#4d50c318079122000fe5f16af1ff8e1917b77e06" @@ -3649,15 +3686,15 @@ joi@^6.10.1: topo "1.x.x" js-tokens@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.1.tgz#08e9f132484a2c45a30907e9dc4d5567b7f114d7" + version "3.0.2" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" -js-yaml@3.x: - version "3.8.3" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.8.3.tgz#33a05ec481c850c8875929166fe1beb61c728766" +js-yaml@3.x, js-yaml@^3.4.3: + version "3.9.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.9.0.tgz#4ffbbf25c2ac963b8299dc74da7e3740de1c18ce" dependencies: argparse "^1.0.7" - esprima "^3.1.1" + esprima "^4.0.0" jsbn@~0.1.0: version "0.1.1" @@ -3696,8 +3733,8 @@ jsesc@~0.5.0: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" json-loader@^0.5.4: - version "0.5.4" - resolved "https://registry.yarnpkg.com/json-loader/-/json-loader-0.5.4.tgz#8baa1365a632f58a3c46d20175fc6002c96e37de" + version "0.5.7" + resolved 
"https://registry.yarnpkg.com/json-loader/-/json-loader-0.5.7.tgz#dca14a70235ff82f0ac9a3abeb60d337a365185d" json-schema-traverse@^0.3.0: version "0.3.1" @@ -3734,13 +3771,13 @@ jsonparse@0.0.5: resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-0.0.5.tgz#330542ad3f0a654665b778f3eb2d9a9fa507ac64" jsonwebtoken@^7.3.0: - version "7.4.0" - resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-7.4.0.tgz#515bf2bba070ec615bad97fd2e945027eb476946" + version "7.4.1" + resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-7.4.1.tgz#7ca324f5215f8be039cd35a6c45bb8cb74a448fb" dependencies: joi "^6.10.1" jws "^3.1.4" lodash.once "^4.0.0" - ms "^0.7.1" + ms "^2.0.0" xtend "^4.0.1" jsprim@^1.2.2: @@ -3770,8 +3807,8 @@ jws@^3.1.4: safe-buffer "^5.0.1" karma-chrome-launcher@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.1.0.tgz#c9769b655d80485236d88e4fbd77a768ff834e77" + version "2.2.0" + resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" dependencies: fs-access "^1.0.0" which "^1.2.1" @@ -3884,13 +3921,7 @@ karma@^1.7.0: tmp "0.0.31" useragent "^2.1.12" -kind-of@^3.0.2, kind-of@^3.1.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.0.tgz#b58abe4d5c044ad33726a8c1525b48cf891bff07" - dependencies: - is-buffer "^1.1.5" - -kind-of@^3.0.3, kind-of@^3.2.0: +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.1.0, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" dependencies: @@ -3902,6 +3933,10 @@ kind-of@^4.0.0: dependencies: is-buffer "^1.1.5" +kind-of@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.0.0.tgz#9038420f740b2e836ce48b34617bcb855947f2a9" + last-run@^1.1.0: version "1.1.1" resolved 
"https://registry.yarnpkg.com/last-run/-/last-run-1.1.1.tgz#45b96942c17b1c79c772198259ba943bebf8ca5b" @@ -3931,13 +3966,6 @@ lcid@^1.0.0: dependencies: invert-kv "^1.0.0" -levn@~0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.2.5.tgz#ba8d339d0ca4a610e3a3f145b9caf48807155054" - dependencies: - prelude-ls "~1.1.0" - type-check "~0.3.1" - levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" @@ -3965,6 +3993,67 @@ liftoff@^2.3.0: rechoir "^0.6.2" resolve "^1.1.7" +lint-staged@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/lint-staged/-/lint-staged-4.0.2.tgz#8e83e11e9e1656c09b6117f6db0d55fd4960a1c0" + dependencies: + app-root-path "^2.0.0" + cosmiconfig "^1.1.0" + execa "^0.7.0" + listr "^0.12.0" + lodash.chunk "^4.2.0" + minimatch "^3.0.0" + npm-which "^3.0.1" + p-map "^1.1.1" + staged-git-files "0.0.4" + +listr-silent-renderer@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" + +listr-update-renderer@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.2.0.tgz#ca80e1779b4e70266807e8eed1ad6abe398550f9" + dependencies: + chalk "^1.1.3" + cli-truncate "^0.2.1" + elegant-spinner "^1.0.1" + figures "^1.7.0" + indent-string "^3.0.0" + log-symbols "^1.0.2" + log-update "^1.0.2" + strip-ansi "^3.0.1" + +listr-verbose-renderer@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.4.0.tgz#44dc01bb0c34a03c572154d4d08cde9b1dc5620f" + dependencies: + chalk "^1.1.3" + cli-cursor "^1.0.2" + date-fns "^1.27.2" + figures "^1.7.0" + +listr@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/listr/-/listr-0.12.0.tgz#6bce2c0f5603fa49580ea17cd6a00cc0e5fa451a" + dependencies: + chalk "^1.1.3" + cli-truncate 
"^0.2.1" + figures "^1.7.0" + indent-string "^2.1.0" + is-promise "^2.1.0" + is-stream "^1.1.0" + listr-silent-renderer "^1.1.1" + listr-update-renderer "^0.2.0" + listr-verbose-renderer "^0.4.0" + log-symbols "^1.0.2" + log-update "^1.0.2" + ora "^0.2.3" + p-map "^1.1.1" + rxjs "^5.0.0-beta.11" + stream-to-observable "^0.1.0" + strip-ansi "^3.0.1" + load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" @@ -4082,9 +4171,9 @@ lodash._shimkeys@~2.4.1: dependencies: lodash._objecttypes "~2.4.1" -lodash.assignwith@^4.0.7: +lodash.chunk@^4.2.0: version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.assignwith/-/lodash.assignwith-4.2.0.tgz#127a97f02adc41751a954d24b0de17e100e038eb" + resolved "https://registry.yarnpkg.com/lodash.chunk/-/lodash.chunk-4.2.0.tgz#66e5ce1f76ed27b4303d8c6512e8d1216e8106bc" lodash.clone@^4.3.2: version "4.5.0" @@ -4131,10 +4220,6 @@ lodash.isarray@^3.0.0: version "3.0.4" resolved "https://registry.yarnpkg.com/lodash.isarray/-/lodash.isarray-3.0.4.tgz#79e4eb88c36a8122af86f844aa9bcd851b5fbb55" -lodash.isempty@^4.2.1: - version "4.4.0" - resolved "https://registry.yarnpkg.com/lodash.isempty/-/lodash.isempty-4.4.0.tgz#6f86cbedd8be4ec987be9aaf33c9684db1b31e7e" - lodash.isequal@^4.0.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0" @@ -4181,10 +4266,6 @@ lodash.once@^4.0.0: version "4.1.1" resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" -lodash.pick@^4.2.1: - version "4.4.0" - resolved "https://registry.yarnpkg.com/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3" - lodash.restparam@^3.0.0: version "3.6.1" resolved "https://registry.yarnpkg.com/lodash.restparam/-/lodash.restparam-3.6.1.tgz#936a4e309ef330a7645ed4145986c85ae5b20805" @@ -4243,18 +4324,27 
@@ lodash.values@~2.4.1: dependencies: lodash.keys "~2.4.1" -lodash@3.6.*: - version "3.6.0" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.6.0.tgz#5266a8f49dd989be4f9f681b6f2a0c55285d0d9a" - lodash@^3.8.0: version "3.10.1" resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6" -lodash@^4.0.0, lodash@^4.14.0, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0, lodash@^4.5.0: +lodash@^4.0.0, lodash@^4.14.0, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0, lodash@^4.5.0, lodash@~4.17.2: version "4.17.4" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" +log-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" + dependencies: + chalk "^1.0.0" + +log-update@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/log-update/-/log-update-1.0.2.tgz#19929f64c4093d2d2e7075a1dad8af59c296b8d1" + dependencies: + ansi-escapes "^1.0.0" + cli-cursor "^1.0.2" + log4js@^0.6.31: version "0.6.38" resolved "https://registry.yarnpkg.com/log4js/-/log4js-0.6.38.tgz#2c494116695d6fb25480943d3fc872e662a522fd" @@ -4291,26 +4381,36 @@ loud-rejection@^1.0.0: currently-unhandled "^0.4.1" signal-exit "^3.0.0" -lru-cache@2, lru-cache@2.2.x: +lru-cache@2: + version "2.7.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.7.3.tgz#6d4524e8b955f95d4f5b58851ce21dd72fb4e952" + +lru-cache@2.2.x: version "2.2.4" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.2.4.tgz#6c658619becf14031d0d0b594b16042ce4dc063d" lru-cache@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.0.2.tgz#1d17679c069cda5d040991a09dbc2c0db377e55e" + version "4.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55" dependencies: - pseudomap "^1.0.1" - yallist "^2.0.0" + pseudomap 
"^1.0.2" + yallist "^2.1.2" + +lru-queue@0.1: + version "0.1.0" + resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" + dependencies: + es5-ext "~0.10.2" magic-string@^0.19.0: - version "0.19.0" - resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.19.0.tgz#198948217254e3e0b93080e01146b7c73b2a06b2" + version "0.19.1" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.19.1.tgz#14d768013caf2ec8fdea16a49af82fc377e75201" dependencies: vlq "^0.2.1" make-error@^1.1.1: - version "1.2.3" - resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.2.3.tgz#6c4402df732e0977ac6faf754a5074b3d2b1d19d" + version "1.3.0" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.0.tgz#52ad3a339ccf10ce62b4040b708fe707244b8b96" make-iterator@^0.1.1: version "0.1.1" @@ -4352,6 +4452,19 @@ media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" +memoizee@^0.4.5: + version "0.4.5" + resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.5.tgz#1bc3ea1e4be056dd475d521979d7be3d5e5b21c8" + dependencies: + d "1" + es5-ext "^0.10.13" + es6-weak-map "^2.0.1" + event-emitter "^0.3.4" + is-promise "^2.1" + lru-queue "0.1" + next-tick "1" + timers-ext "0.1" + memory-fs@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.2.0.tgz#f2bb25368bc121e391c2520de92969caee0a0290" @@ -4392,8 +4505,8 @@ merge-stream@^1.0.0: readable-stream "^2.0.1" merge2@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.0.3.tgz#fa44f8b2262615ab72f0808a401d478a70e394db" + version "1.1.0" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.1.0.tgz#99fb32b35e9fad840146004e13a56b7549a524db" micromatch@^2.1.5, micromatch@^2.3.7: version "2.3.11" @@ -4449,8 +4562,8 @@ mime-types@^2.1.12, mime-types@~2.1.11, mime-types@~2.1.15, 
mime-types@~2.1.7: mime-db "~1.27.0" mime@^1.3.4: - version "1.3.4" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.4.tgz#115f9e3b6b3daf2959983cb38f149a2d40eb5d53" + version "1.3.6" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.6.tgz#591d84d3653a6b0b4a3b9df8de5aa8108e72e5e0" minimalistic-assert@^1.0.0: version "1.0.0" @@ -4467,19 +4580,13 @@ minimatch@0.3: lru-cache "2" sigmund "~1.0.0" -"minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.3.tgz#2a4e4090b96b2db06a9d7df01055a62a77c9b774" - dependencies: - brace-expansion "^1.0.0" - -minimatch@2.x: - version "2.0.10" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-2.0.10.tgz#8d087c39c6b38c001b97fca7ce6d0e1e80afbac7" +"minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" dependencies: - brace-expansion "^1.0.0" + brace-expansion "^1.1.7" -minimist@0.0.8, minimist@~0.0.1: +minimist@0.0.8: version "0.0.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" @@ -4491,7 +4598,7 @@ minimist@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.2.0.tgz#4dffe525dae2b864c66c2e23c6271d7afdecefce" -minimist@~0.0.7, minimist@~0.0.9: +minimist@~0.0.1, minimist@~0.0.7, minimist@~0.0.9: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" @@ -4509,8 +4616,8 @@ mkdirp@0.5.1, mkdirp@0.5.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdi minimist "0.0.8" mocha@^3.0.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/mocha/-/mocha-3.3.0.tgz#d29b7428d3f52c82e2e65df1ecb7064e1aabbfb5" + version "3.4.2" + resolved 
"https://registry.yarnpkg.com/mocha/-/mocha-3.4.2.tgz#d0ef4d332126dbf18d0d640c9b382dd48be97594" dependencies: browser-stdout "1.3.0" commander "2.9.0" @@ -4553,11 +4660,7 @@ ms@0.7.2: version "0.7.2" resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765" -ms@0.7.3, ms@^0.7.1: - version "0.7.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.3.tgz#708155a5e44e33f5fd0fc53e81d0d40a91be1fff" - -ms@2.0.0: +ms@2.0.0, ms@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" @@ -4604,7 +4707,7 @@ negotiator@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9" -next-tick@^1.0.0: +next-tick@1, next-tick@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" @@ -4664,9 +4767,9 @@ node-libs-browser@^2.0.0: util "^0.10.3" vm-browserify "0.0.4" -node-pre-gyp@^0.6.29: - version "0.6.34" - resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.34.tgz#94ad1c798a11d7fc67381b50d47f8cc18d9799f7" +node-pre-gyp@^0.6.36: + version "0.6.36" + resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.36.tgz#db604112cb74e0d477554e9b505b17abddfab786" dependencies: mkdirp "^0.5.1" nopt "^4.0.1" @@ -4696,8 +4799,8 @@ nopt@^4.0.1: osenv "^0.1.4" normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: - version "2.3.8" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.3.8.tgz#d819eda2a9dedbd1ffa563ea4071d936782295bb" + version "2.4.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f" dependencies: hosted-git-info "^2.1.4" is-builtin-module "^1.0.0" @@ -4720,19 +4823,33 @@ now-and-later@^1.0.0: dependencies: once "^1.3.2" +npm-path@^2.0.2: + version 
"2.0.3" + resolved "https://registry.yarnpkg.com/npm-path/-/npm-path-2.0.3.tgz#15cff4e1c89a38da77f56f6055b24f975dfb2bbe" + dependencies: + which "^1.2.10" + npm-run-path@^2.0.0, npm-run-path@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" dependencies: path-key "^2.0.0" +npm-which@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/npm-which/-/npm-which-3.0.1.tgz#9225f26ec3a285c209cae67c3b11a6b4ab7140aa" + dependencies: + commander "^2.9.0" + npm-path "^2.0.2" + which "^1.2.10" + npmlog@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.0.2.tgz#d03950e0e78ce1527ba26d2a7592e9348ac3e75f" + version "4.1.2" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" - gauge "~2.7.1" + gauge "~2.7.3" set-blocking "~2.0.0" null-check@^1.0.0: @@ -4744,8 +4861,8 @@ number-is-nan@^1.0.0: resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" "nwmatcher@>= 1.3.9 < 2.0.0": - version "1.3.9" - resolved "https://registry.yarnpkg.com/nwmatcher/-/nwmatcher-1.3.9.tgz#8bab486ff7fa3dfd086656bbe8b17116d3692d2a" + version "1.4.1" + resolved "https://registry.yarnpkg.com/nwmatcher/-/nwmatcher-1.4.1.tgz#7ae9b07b0ea804db7e25f05cb5fe4097d4e4949f" oauth-sign@~0.8.1: version "0.8.2" @@ -4794,7 +4911,7 @@ object.defaults@^0.3.0: for-own "^0.1.3" isobject "^1.0.0" -object.defaults@^1.0.0: +object.defaults@^1.0.0, object.defaults@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/object.defaults/-/object.defaults-1.1.0.tgz#3a7f868334b407dea06da16d88d5cd29e435fecf" dependencies: @@ -4857,17 +4974,6 @@ optimist@~0.3.5: dependencies: wordwrap "~0.0.2" -optionator@^0.5.0: - version "0.5.0" - resolved 
"https://registry.yarnpkg.com/optionator/-/optionator-0.5.0.tgz#b75a8995a2d417df25b6e4e3862f50aa88651368" - dependencies: - deep-is "~0.1.2" - fast-levenshtein "~1.0.0" - levn "~0.2.5" - prelude-ls "~1.1.1" - type-check "~0.3.1" - wordwrap "~0.0.2" - optionator@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" @@ -4883,6 +4989,15 @@ options@>=0.0.5: version "0.0.6" resolved "https://registry.yarnpkg.com/options/-/options-0.0.6.tgz#ec22d312806bb53e731773e7cdaefcf1c643128f" +ora@^0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/ora/-/ora-0.2.3.tgz#37527d220adcd53c39b73571d754156d5db657a4" + dependencies: + chalk "^1.1.1" + cli-cursor "^1.0.2" + cli-spinners "^0.1.2" + object-assign "^4.0.1" + ordered-read-streams@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/ordered-read-streams/-/ordered-read-streams-0.3.0.tgz#7137e69b3298bb342247a1bbee3881c80e2fd78b" @@ -4935,6 +5050,10 @@ p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" +p-map@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-1.1.1.tgz#05f5e4ae97a068371bc2a5cc86bfbdbc19c4ae7a" + pad@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/pad/-/pad-1.1.0.tgz#7a7d185200ebac32f9f12ee756c3a1d087b3190b" @@ -5042,6 +5161,10 @@ path-key@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" +path-parse@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" + path-platform@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-platform/-/path-platform-0.0.1.tgz#b5585d7c3c463d89aa0060d86611cf1afd617e2a" @@ -5075,10 +5198,14 @@ pbkdf2-compat@2.0.1: resolved 
"https://registry.yarnpkg.com/pbkdf2-compat/-/pbkdf2-compat-2.0.1.tgz#b6e0c8fa99494d94e0511575802a59a5c142f288" pbkdf2@^3.0.3: - version "3.0.9" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.9.tgz#f2c4b25a600058b3c3773c086c37dbbee1ffe693" + version "3.0.12" + resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.12.tgz#be36785c5067ea48d806ff923288c5f750b6b8a2" dependencies: - create-hmac "^1.1.2" + create-hash "^1.1.2" + create-hmac "^1.1.4" + ripemd160 "^2.0.1" + safe-buffer "^5.0.1" + sha.js "^2.4.8" performance-now@^0.2.0: version "0.2.0" @@ -5106,7 +5233,7 @@ posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" -prelude-ls@~1.1.0, prelude-ls@~1.1.1, prelude-ls@~1.1.2: +prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" @@ -5114,6 +5241,10 @@ preserve@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" +prettier@^1.5.1: + version "1.5.3" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.5.3.tgz#59dadc683345ec6b88f88b94ed4ae7e1da394bfe" + pretty-hrtime@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz#b7e3ea42435a4c9b2759d99e0f201eb195802ee1" @@ -5150,7 +5281,7 @@ prr@~0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a" -pseudomap@^1.0.1: +pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" @@ -5197,15 +5328,17 @@ querystring@0.2.0: resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" randomatic@^1.1.3: - version "1.1.6" - resolved 
"https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.6.tgz#110dcabff397e9dcff7c0789ccc0a49adf1ec5bb" + version "1.1.7" + resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.7.tgz#c7abe9cc8b87c0baa876b19fde83fd464797e38c" dependencies: - is-number "^2.0.2" - kind-of "^3.0.2" + is-number "^3.0.0" + kind-of "^4.0.0" randombytes@^2.0.0, randombytes@^2.0.1: - version "2.0.3" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.0.3.tgz#674c99760901c3c4112771a31e521dc349cc09ec" + version "2.0.5" + resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.0.5.tgz#dc009a246b8d09a177b4b7a0ae77bc570f4b1b79" + dependencies: + safe-buffer "^5.1.0" range-parser@^1.2.0: version "1.2.0" @@ -5261,16 +5394,16 @@ readable-stream@^1.0.27-1, readable-stream@^1.1.7, readable-stream@~1.1.10, read isarray "0.0.1" string_decoder "~0.10.x" -readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.4, readable-stream@^2.0.5, readable-stream@^2.0.6, readable-stream@^2.1.4, readable-stream@^2.1.5, readable-stream@^2.2.6: - version "2.2.9" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.2.9.tgz#cf78ec6f4a6d1eb43d26488cac97f042e74b7fc8" +readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.4, readable-stream@^2.0.5, readable-stream@^2.0.6, readable-stream@^2.1.4, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.2.6: + version "2.3.3" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.3.tgz#368f2512d79f9d46fdfc71349ae7878bbc1eb95c" dependencies: - buffer-shims "~1.0.0" core-util-is "~1.0.0" - inherits "~2.0.1" + inherits "~2.0.3" isarray "~1.0.0" process-nextick-args "~1.0.6" - string_decoder "~1.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.0.3" util-deprecate "~1.0.1" readable-stream@~2.0.0: @@ -5368,8 +5501,8 @@ remap-istanbul@^0.8.4: through2 "2.0.1" remove-trailing-separator@^1.0.1: - version 
"1.0.1" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.0.1.tgz#615ebb96af559552d4bf4057c8436d486ab63cc4" + version "1.0.2" + resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.0.2.tgz#69b062d978727ad14dc6b56ba4ab772fd8d70511" repeat-element@^1.1.2: version "1.1.2" @@ -5433,13 +5566,17 @@ request@^2.79.0, request@^2.81.0: uuid "^3.0.0" require-dir@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/require-dir/-/require-dir-0.3.1.tgz#b5a8e28bae0343bb0d0cc38ab1f531e1931b264a" + version "0.3.2" + resolved "https://registry.yarnpkg.com/require-dir/-/require-dir-0.3.2.tgz#c1d5c75e9fbffde9f2e6b33e383db4f594b5a6a9" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" +require-from-string@^1.1.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-1.2.1.tgz#529c9ccef27380adfec9a2f965b649bbee636418" + require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" @@ -5463,10 +5600,16 @@ resolve@0.6.3, resolve@~0.6.1, resolve@~0.6.3: version "0.6.3" resolved "https://registry.yarnpkg.com/resolve/-/resolve-0.6.3.tgz#dd957982e7e736debdf53b58a4dd91754575dd46" -resolve@1.1.7, resolve@1.1.x, resolve@^1.1.6, resolve@^1.1.7, resolve@~1.1.6: +resolve@1.1.7, resolve@1.1.x, resolve@~1.1.6: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" +resolve@^1.1.6, resolve@^1.1.7: + version "1.3.3" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.3.3.tgz#655907c3469a8680dc2de3a275a8fdd69691f0e5" + dependencies: + path-parse "^1.0.5" + resolve@~0.3.0: version "0.3.1" resolved 
"https://registry.yarnpkg.com/resolve/-/resolve-0.3.1.tgz#34c63447c664c70598d1c9b126fc43b2a24310a4" @@ -5505,9 +5648,12 @@ ripemd160@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-0.2.0.tgz#2bf198bde167cacfa51c0a928e84b68bbe171fce" -ripemd160@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-1.0.1.tgz#93a4bbd4942bc574b69a8fa57c71de10ecca7d6e" +ripemd160@^2.0.0, ripemd160@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.1.tgz#0f4584295c53a3628af7e6d79aca21ce57d1c6e7" + dependencies: + hash-base "^2.0.0" + inherits "^2.0.1" ruglify@~1.0.0: version "1.0.0" @@ -5526,17 +5672,23 @@ rx@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782" -safe-buffer@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.0.1.tgz#d263ca54696cd8a306b5ca6551e92de57918fbe7" +rxjs@^5.0.0-beta.11: + version "5.4.2" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.4.2.tgz#2a3236fcbf03df57bae06fd6972fd99e5c08fcf7" + dependencies: + symbol-observable "^1.0.1" + +safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" samsam@1.x, samsam@^1.1.3: version "1.2.1" resolved "https://registry.yarnpkg.com/samsam/-/samsam-1.2.1.tgz#edd39093a3184370cb859243b2bdf255e7d8ea67" sax@^1.2.1: - version "1.2.2" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.2.tgz#fd8631a23bc7826bef5d871bdb87378c95647828" + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" semver-greatest-satisfied-range@^1.0.0: version "1.0.0" @@ -5550,8 +5702,8 @@ semver-regex@1.0.0, semver-regex@^1.0.0: resolved 
"https://registry.yarnpkg.com/semver-regex/-/semver-regex-1.0.0.tgz#92a4969065f9c70c694753d55248fc68f8f652c9" "semver@2 || 3 || 4 || 5", semver@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" + version "5.4.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.4.1.tgz#e059c09d8571f0540823733433505d3a2f00b18e" semver@^4.2.0, semver@~4.3.3: version "4.3.6" @@ -5592,7 +5744,7 @@ sha.js@2.2.6: version "2.2.6" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.2.6.tgz#17ddeddc5f722fb66501658895461977867315ba" -sha.js@^2.3.6: +sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.8" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.8.tgz#37068c2c476b6baf402d14a49c67f597921f634f" dependencies: @@ -5616,7 +5768,7 @@ shell-quote@~0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-0.0.1.tgz#1a41196f3c0333c482323593d6886ecf153dd986" -shelljs@0.7.7, shelljs@^0.7.3: +shelljs@0.7.7: version "0.7.7" resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.7.7.tgz#b2f5c77ef97148f4b4f6e22682e10bba8667cff1" dependencies: @@ -5624,6 +5776,14 @@ shelljs@0.7.7, shelljs@^0.7.3: interpret "^1.0.0" rechoir "^0.6.2" +shelljs@^0.7.3: + version "0.7.8" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.7.8.tgz#decbcf874b0d1e5fb72e14b164a9683048e9acb3" + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + shx@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/shx/-/shx-0.2.2.tgz#0a304d020b0edf1306ad81570e80f0346df58a39" @@ -5641,8 +5801,8 @@ signal-exit@^3.0.0: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" sinon@^2.1.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/sinon/-/sinon-2.2.0.tgz#3b1b42ff5defcbf51a52a62aca6d61171b9fd262" + version "2.3.8" + resolved 
"https://registry.yarnpkg.com/sinon/-/sinon-2.3.8.tgz#31de06fed8fba3a671e576dd96d0a5863796f25c" dependencies: diff "^3.1.0" formatio "1.2.0" @@ -5657,6 +5817,10 @@ slash@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" +slice-ansi@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" + snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" @@ -5844,8 +6008,8 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" sshpk@^1.7.0: - version "1.13.0" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.13.0.tgz#ff2a3e4fd04497555fed97b39a0fd82fafb3a33c" + version "1.13.1" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.13.1.tgz#512df6da6287144316dc4c18fe1cf1d940739be3" dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" @@ -5854,14 +6018,21 @@ sshpk@^1.7.0: optionalDependencies: bcrypt-pbkdf "^1.0.0" ecc-jsbn "~0.1.1" - jodid25519 "^1.0.0" jsbn "~0.1.0" tweetnacl "~0.14.0" -stack-trace@0.0.9, stack-trace@0.0.x: +stack-trace@0.0.9: version "0.0.9" resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.9.tgz#a8f6eaeca90674c333e7c43953f275b451510695" +stack-trace@0.0.x: + version "0.0.10" + resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" + +staged-git-files@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/staged-git-files/-/staged-git-files-0.0.4.tgz#d797e1b551ca7a639dec0237dc6eb4bb9be17d35" + static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" @@ -5905,8 +6076,8 @@ stream-exhaust@^1.0.1: resolved 
"https://registry.yarnpkg.com/stream-exhaust/-/stream-exhaust-1.0.1.tgz#c0c4455e54ce5a179ca8736e73334b4e7fd67553" stream-http@^2.0.0, stream-http@^2.3.1: - version "2.7.0" - resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.7.0.tgz#cec1f4e3b494bc4a81b451808970f8b20b4ed5f6" + version "2.7.2" + resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.7.2.tgz#40a050ec8dc3b53b33d9909415c02c0bf1abfbad" dependencies: builtin-status-codes "^3.0.0" inherits "^2.0.1" @@ -5918,9 +6089,13 @@ stream-shift@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" +stream-to-observable@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/stream-to-observable/-/stream-to-observable-0.1.0.tgz#45bf1d9f2d7dc09bed81f1c307c430e68b84cffe" + streamroller@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-0.4.0.tgz#a273f1f91994549a2ddd112ccaa2d1dd23cb758c" + version "0.4.1" + resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-0.4.1.tgz#d435bd5974373abd9bd9068359513085106cc05f" dependencies: date-format "^0.0.0" debug "^0.7.2" @@ -5943,11 +6118,11 @@ string_decoder@~0.0.0: version "0.0.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.0.1.tgz#f5472d0a8d1650ec823752d24e6fd627b39bf141" -string_decoder@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.0.tgz#f06f41157b664d86069f84bdbdc9b0d8ab281667" +string_decoder@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab" dependencies: - buffer-shims "~1.0.0" + safe-buffer "~5.1.0" stringstream@~0.0.4: version "0.0.5" @@ -6032,6 +6207,10 @@ supports-color@^4.0.0: dependencies: has-flag "^2.0.0" +symbol-observable@^1.0.1: + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.4.tgz#29bf615d4aa7121bdd898b22d4b3f9bc4e2aa03d" + symbol-tree@^3.2.1: version "3.2.2" resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.2.tgz#ae27db38f660a7ae2e1c3b7d1bc290819b8519e6" @@ -6046,9 +6225,9 @@ tapable@^0.1.8, tapable@~0.1.8: version "0.1.10" resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.1.10.tgz#29c35707c2b70e50d07482b5d202e8ed446dafd4" -tapable@^0.2.5, tapable@~0.2.5: - version "0.2.6" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.6.tgz#206be8e188860b514425375e6f1ae89bfb01fd8d" +tapable@^0.2.7, tapable@~0.2.5: + version "0.2.7" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.7.tgz#e46c0daacbb2b8a98b9b0cea0f4052105817ed5c" tar-pack@^3.4.0: version "3.4.0" @@ -6152,8 +6331,8 @@ tildify@^1.0.0: os-homedir "^1.0.0" time-stamp@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-1.0.1.tgz#9f4bd23559c9365966f3302dbba2b07c6b99b151" + version "1.1.0" + resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-1.1.0.tgz#764a5a11af50561921b133f3b44e618687e0f5c3" timers-browserify@^1.0.1: version "1.4.2" @@ -6173,6 +6352,13 @@ timers-browserify@~1.0.1: dependencies: process "~0.5.1" +timers-ext@0.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.2.tgz#61cc47a76c1abd3195f14527f978d58ae94c5204" + dependencies: + es5-ext "~0.10.14" + next-tick "1" + tmp@0.0.29, tmp@^0.0.29: version "0.0.29" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.29.tgz#f25125ff0dd9da3ccb0c2dd371ee1288bb9128c0" @@ -6292,7 +6478,7 @@ tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" -type-check@~0.3.1, type-check@~0.3.2: +type-check@~0.3.2: version "0.3.2" resolved 
"https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" dependencies: @@ -6310,14 +6496,14 @@ type-detect@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.3.tgz#0e3f2670b44099b0b46c284d136a7ef49c74c2ea" -type-is@~1.6.14: +type-is@~1.6.15: version "1.6.15" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.15.tgz#cab10fb4909e441c82842eafe1ad646c81804410" dependencies: media-typer "0.3.0" mime-types "~2.1.15" -typedarray@~0.0.5: +typedarray@^0.0.6, typedarray@~0.0.5: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" @@ -6363,7 +6549,7 @@ uglify-to-browserify@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" -uglifyjs-webpack-plugin@^0.4.4: +uglifyjs-webpack-plugin@^0.4.6: version "0.4.6" resolved "https://registry.yarnpkg.com/uglifyjs-webpack-plugin/-/uglifyjs-webpack-plugin-0.4.6.tgz#b951f4abb6bd617e66f63eb891498e391763e309" dependencies: @@ -6468,8 +6654,8 @@ user-home@^1.1.1: resolved "https://registry.yarnpkg.com/user-home/-/user-home-1.1.1.tgz#2b5be23a32b63a7c9deb8d0f28d485724a3df190" useragent@^2.1.12: - version "2.1.13" - resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.1.13.tgz#bba43e8aa24d5ceb83c2937473e102e21df74c10" + version "2.2.1" + resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.2.1.tgz#cf593ef4f2d175875e8bb658ea92e18a4fd06d8e" dependencies: lru-cache "2.2.x" tmp "0.0.x" @@ -6489,8 +6675,8 @@ utils-merge@1.0.0: resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.0.tgz#0294fb922bb9375153541c4f7096231f287c8af8" uuid@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1" + version "3.1.0" + resolved 
"https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04" v8flags@^2.0.10, v8flags@^2.0.9: version "2.1.1" @@ -6509,8 +6695,8 @@ vali-date@^1.0.0: resolved "https://registry.yarnpkg.com/vali-date/-/vali-date-1.0.0.tgz#1b904a59609fb328ef078138420934f6b86709a6" validate-commit-msg@^2.12.1: - version "2.12.1" - resolved "https://registry.yarnpkg.com/validate-commit-msg/-/validate-commit-msg-2.12.1.tgz#612b61bc9f09f0fee5130de3648870d01cdddf1d" + version "2.13.1" + resolved "https://registry.yarnpkg.com/validate-commit-msg/-/validate-commit-msg-2.13.1.tgz#71386ea45349c4970e091296f132dcf67dc50714" dependencies: conventional-commit-types "^2.0.0" find-parent-dir "^0.3.0" @@ -6587,14 +6773,13 @@ vinyl@^0.5.0: replace-ext "0.0.1" vinyl@^2.0.0, vinyl@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-2.0.2.tgz#0a3713d8d4e9221c58f10ca16c0116c9e25eda7c" + version "2.1.0" + resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-2.1.0.tgz#021f9c2cf951d6b939943c89eb5ee5add4fd924c" dependencies: - clone "^1.0.0" + clone "^2.1.1" clone-buffer "^1.0.0" clone-stats "^1.0.0" cloneable-readable "^1.0.0" - is-stream "^1.1.0" remove-trailing-separator "^1.0.1" replace-ext "^1.0.0" @@ -6620,12 +6805,12 @@ watchpack@^0.2.1: chokidar "^1.0.0" graceful-fs "^4.1.2" -watchpack@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.3.1.tgz#7d8693907b28ce6013e7f3610aa2a1acf07dad87" +watchpack@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.4.0.tgz#4a1472bcbb952bd0a9bb4036801f954dfb39faac" dependencies: async "^2.1.2" - chokidar "^1.4.3" + chokidar "^1.7.0" graceful-fs "^4.1.2" webidl-conversions@^3.0.0: @@ -6683,15 +6868,15 @@ webpack@^1.12.9: webpack-core "~0.6.9" webpack@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-3.0.0.tgz#ee9bcebf21247f7153cb410168cab45e3a59d4d7" + version "3.3.0" + resolved 
"https://registry.yarnpkg.com/webpack/-/webpack-3.3.0.tgz#ce2f9e076566aba91f74887133a883fd7da187bc" dependencies: acorn "^5.0.0" acorn-dynamic-import "^2.0.0" ajv "^5.1.5" ajv-keywords "^2.0.0" async "^2.1.2" - enhanced-resolve "^3.0.0" + enhanced-resolve "^3.3.0" escope "^3.6.0" interpret "^1.0.0" json-loader "^0.5.4" @@ -6704,8 +6889,8 @@ webpack@^3.0.0: source-map "^0.5.3" supports-color "^3.1.0" tapable "~0.2.5" - uglifyjs-webpack-plugin "^0.4.4" - watchpack "^1.3.1" + uglifyjs-webpack-plugin "^0.4.6" + watchpack "^1.4.0" webpack-sources "^1.0.1" yargs "^6.0.0" @@ -6726,8 +6911,8 @@ whatwg-encoding@^1.0.1: iconv-lite "0.4.13" whatwg-url@^4.3.0: - version "4.7.1" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-4.7.1.tgz#df4dc2e3f25a63b1fa5b32ed6d6c139577d690de" + version "4.8.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-4.8.0.tgz#d2981aa9148c1e00a41c5a6131166ab4683bbcc0" dependencies: tr46 "~0.0.3" webidl-conversions "^3.0.0" @@ -6736,17 +6921,17 @@ which-module@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f" -which@^1.1.1, which@^1.2.1, which@^1.2.12, which@^1.2.9: +which@^1.1.1, which@^1.2.1, which@^1.2.10, which@^1.2.12, which@^1.2.9: version "1.2.14" resolved "https://registry.yarnpkg.com/which/-/which-1.2.14.tgz#9a87c4378f03e827cecaf1acdf56c736c01c14e5" dependencies: isexe "^2.0.0" wide-align@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.0.tgz#40edde802a71fea1f070da3e62dcda2e7add96ad" + version "1.1.2" + resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.2.tgz#571e0f1b0604636ebc0dfc21b0339bbe31341710" dependencies: - string-width "^1.0.1" + string-width "^1.0.2" window-size@0.1.0: version "0.1.0" @@ -6849,7 +7034,7 @@ y18n@^3.2.0, y18n@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" 
-yallist@^2.0.0: +yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" From 5a28b055a8ca88597cb66d37d16eefda6e203f5f Mon Sep 17 00:00:00 2001 From: Josh Crowther Date: Thu, 6 Jul 2017 15:40:58 -0700 Subject: [PATCH 2/5] fix(deps): fix issue where new typescript version was causing errors in the build Something breaks in version 2.4.1 of typescript. We are pinning to 2.3.0 till those things can be triaged and fixed --- yarn-error.log | 147 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) create mode 100644 yarn-error.log diff --git a/yarn-error.log b/yarn-error.log new file mode 100644 index 00000000000..f837eb7e355 --- /dev/null +++ b/yarn-error.log @@ -0,0 +1,147 @@ +Arguments: + /Users/jshcrowthe/.nvm/versions/node/v6.11.0/bin/node /Users/jshcrowthe/.nvm/versions/node/v6.11.0/bin/yarn + +PATH: + /usr/local/git/current/bin:/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin:/sbin:/Users/jshcrowthe/google-cloud-sdk/bin:/Users/jshcrowthe/.nvm/versions/node/v6.11.0/bin:/Users/jshcrowthe/.bin:/Users/jshcrowthe/.yarn-config/global/node_modules/.bin + +Yarn version: + 0.27.5 + +Node version: + 6.11.0 + +Platform: + darwin x64 + +npm manifest: + { + "name": "firebase", + "version": "4.1.4", + "description": "Firebase JavaScript library for web and Node.js", + "author": "Firebase (https://firebase.google.com/)", + "license": "Apache-2.0", + "homepage": "https://firebase.google.com/", + "keywords": [ + "authentication", + "database", + "Firebase", + "firebase", + "realtime", + "storage" + ], + "repository": { + "type": "git", + "url": "https://github.com/firebase/firebase-js-sdk.git" + }, + "scripts": { + "build": "gulp build", + "build:package": "gulp build && cd dist/package && npm install --production && npm shrinkwrap && npm pack && shx mv *.tgz ../", + "test": "gulp build && gulp build:tests && gulp test && gulp test:integration", + "dev": 
"gulp dev", + "commitmsg": "validate-commit-msg", + "precommit": "lint-staged" + }, + "main": "index.js", + "devDependencies": { + "@types/chai": "^3.4.35", + "@types/mocha": "^2.2.39", + "@types/node": "^7.0.8", + "@types/sinon": "^1.16.35", + "awesome-typescript-loader": "^3.2.1", + "babel-cli": "^6.23.0", + "babel-core": "^6.24.0", + "babel-plugin-add-module-exports": "^0.2.1", + "babel-plugin-inline-replace-variables": "^1.2.2", + "babel-plugin-minify-dead-code-elimination": "^0.1.4", + "babel-preset-env": "^1.2.1", + "chai": "^3.5.0", + "child-process-promise": "^2.2.1", + "cross-env": "^5.0.1", + "cz-customizable": "^5.0.0", + "filesize": "^3.5.6", + "git-rev-sync": "^1.9.0", + "google-closure-compiler-js": "^20170218.0.0", + "gulp": "gulpjs/gulp#4.0", + "gulp-babel": "^6.1.2", + "gulp-browserify": "^0.5.1", + "gulp-clone": "^1.0.0", + "gulp-concat": "^2.6.1", + "gulp-env": "^0.4.0", + "gulp-file": "^0.3.0", + "gulp-header": "^1.8.8", + "gulp-if": "^2.0.2", + "gulp-istanbul": "^1.1.1", + "gulp-mocha": "^4.1.0", + "gulp-rename": "^1.2.2", + "gulp-replace": "^0.5.4", + "gulp-sourcemaps": "^2.4.1", + "gulp-strip-comments": "^2.4.5", + "gulp-typescript": "^3.1.6", + "gzip-size": "^3.0.0", + "husky": "^0.13.3", + "jsdom": "^9.12.0", + "karma": "^1.7.0", + "karma-chrome-launcher": "^2.0.0", + "karma-firefox-launcher": "^1.0.1", + "karma-mocha": "^1.3.0", + "karma-spec-reporter": "^0.0.30", + "karma-typescript": "^3.0.4", + "lint-staged": "^4.0.0", + "merge2": "^1.0.3", + "mkdirp": "^0.5.1", + "prettier": "^1.5.1", + "require-dir": "^0.3.1", + "rimraf": "^2.6.1", + "shx": "^0.2.2", + "sinon": "^2.1.0", + "through2": "^2.0.3", + <<<<<<< HEAD + "ts-node": "^3.3.0", + "typescript": "^2.4.2", + ======= + "ts-loader": "^2.1.0", + "ts-node": "2.1.1", + "typescript": "2.3.0", + >>>>>>> fix(deps): fix issue where new typescript version was causing errors in the build + "validate-commit-msg": "^2.12.1", + "vinyl-named": "^1.1.0", + "webpack": "^3.0.0", + "webpack-stream": 
"^3.2.0", + "wrapper-webpack-plugin": "^0.1.11" + }, + "dependencies": { + "dom-storage": "^2.0.2", + "faye-websocket": "0.9.3", + "jsonwebtoken": "^7.3.0", + "promise-polyfill": "^6.0.2", + "xmlhttprequest": "^1.8.0" + }, + "config": { + "commitizen": { + "path": "./node_modules/cz-customizable" + }, + "cz-customizable": { + "config": "./.cz-config.js" + } + }, + "lint-staged": { + "**/*.ts": [ + "prettier --write {src,tests}/**/*.ts", + "git add" + ] + } + } + +yarn manifest: + No manifest + +Lockfile: + No lockfile + +Trace: + SyntaxError: /Users/jshcrowthe/Repos/firebase-js-sdk/package.json: Unexpected token < in JSON at position 2526 + at Object.parse (native) + at /Users/jshcrowthe/.nvm/versions/node/v6.11.0/lib/node_modules/yarn/lib/util/fs.js:554:57 + at next (native) + at step (/Users/jshcrowthe/.nvm/versions/node/v6.11.0/lib/node_modules/yarn/node_modules/babel-runtime/helpers/asyncToGenerator.js:17:30) + at /Users/jshcrowthe/.nvm/versions/node/v6.11.0/lib/node_modules/yarn/node_modules/babel-runtime/helpers/asyncToGenerator.js:28:13 From 511aeb5808d99df3f33f950a915c86d426b5f9a5 Mon Sep 17 00:00:00 2001 From: Josh Crowther Date: Thu, 6 Jul 2017 15:42:58 -0700 Subject: [PATCH 3/5] WIP: remove tslint.json Temporarily removing this file as we don't pass the checks --- tslint.json | 17 ----------------- 1 file changed, 17 deletions(-) delete mode 100644 tslint.json diff --git a/tslint.json b/tslint.json deleted file mode 100644 index dbcb5c222c5..00000000000 --- a/tslint.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "extends": "tslint:recommended", - "rules": { - "quotemark": false, - "trailing-comma": false, - "interface-name": false, - "member-access": false, - "ordered-imports": false, - "no-console": false, - "max-classes-per-file": false, - "object-literal-key-quotes": false, - "member-ordering": false, - "object-literal-sort-keys": false, - "max-line-length": [true, 80], - "no-string-literal": false - } -} From 55cac088326a261eb8dddc13045ca2bded2fb4aa Mon 
Sep 17 00:00:00 2001 From: Josh Crowther Date: Sat, 8 Jul 2017 14:02:17 -0700 Subject: [PATCH 4/5] refactor(style): refactor to prefer single quotes over double quotes --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index fbedd608d9c..5389b13856f 100644 --- a/package.json +++ b/package.json @@ -104,7 +104,7 @@ }, "lint-staged": { "**/*.ts": [ - "prettier --write {src,tests}/**/*.ts", + "prettier --write --single-quote", "git add" ] } From f95e1dcd304243c2205b0d92798e024a79c1917b Mon Sep 17 00:00:00 2001 From: Josh Crowther Date: Tue, 25 Jul 2017 15:28:12 -0700 Subject: [PATCH 5/5] style(*): run prettier on codebase --- src/app.ts | 7 +- src/app/errors.ts | 38 +- src/app/firebase_app.ts | 268 +- src/app/subscribe.ts | 43 +- src/database.ts | 32 +- src/database/api/DataSnapshot.ts | 32 +- src/database/api/Database.ts | 22 +- src/database/api/Query.ts | 302 ++- src/database/api/Reference.ts | 131 +- src/database/api/TransactionResult.ts | 7 +- src/database/api/internal.ts | 14 +- src/database/api/onDisconnect.ts | 74 +- src/database/api/test_access.ts | 31 +- src/database/core/AuthTokenProvider.ts | 46 +- src/database/core/CompoundWrite.ts | 43 +- src/database/core/PersistentConnection.ts | 328 ++- src/database/core/ReadonlyRestClient.ts | 176 +- src/database/core/Repo.ts | 317 ++- src/database/core/RepoInfo.ts | 20 +- src/database/core/RepoManager.ts | 35 +- src/database/core/Repo_transaction.ts | 379 ++- src/database/core/ServerActions.ts | 45 +- src/database/core/SnapshotHolder.ts | 2 +- src/database/core/SparseSnapshotTree.ts | 13 +- src/database/core/SyncPoint.ts | 81 +- src/database/core/SyncTree.ts | 350 ++- src/database/core/WriteTree.ts | 265 +- src/database/core/operation/AckUserWrite.ts | 33 +- src/database/core/operation/ListenComplete.ts | 5 +- src/database/core/operation/Merge.ts | 33 +- src/database/core/operation/Operation.ts | 35 +- src/database/core/operation/Overwrite.ts | 20 +-
src/database/core/snap/ChildrenNode.ts | 188 +- src/database/core/snap/IndexMap.ts | 112 +- src/database/core/snap/LeafNode.ts | 62 +- src/database/core/snap/Node.ts | 19 +- src/database/core/snap/childSet.ts | 54 +- src/database/core/snap/comparators.ts | 2 +- src/database/core/snap/indexes/Index.ts | 10 +- src/database/core/snap/indexes/KeyIndex.ts | 22 +- src/database/core/snap/indexes/PathIndex.ts | 28 +- .../core/snap/indexes/PriorityIndex.ts | 11 +- src/database/core/snap/indexes/ValueIndex.ts | 10 +- src/database/core/snap/nodeFromJSON.ts | 55 +- src/database/core/snap/snap.ts | 30 +- src/database/core/stats/StatsCollection.ts | 4 +- src/database/core/stats/StatsListener.ts | 8 +- src/database/core/stats/StatsManager.ts | 5 +- src/database/core/stats/StatsReporter.ts | 9 +- .../core/storage/DOMStorageWrapper.ts | 3 +- src/database/core/storage/storage.ts | 16 +- src/database/core/util/CountedSet.ts | 2 +- src/database/core/util/EventEmitter.ts | 28 +- src/database/core/util/ImmutableTree.ts | 101 +- src/database/core/util/NextPushId.ts | 7 +- src/database/core/util/OnlineMonitor.ts | 46 +- src/database/core/util/Path.ts | 69 +- src/database/core/util/ServerValues.ts | 51 +- src/database/core/util/SortedMap.ts | 243 +- src/database/core/util/Tree.ts | 62 +- src/database/core/util/VisibilityMonitor.ts | 29 +- src/database/core/util/libs/parser.ts | 58 +- src/database/core/util/util.ts | 279 ++- src/database/core/util/validation.ts | 388 ++- src/database/core/view/CacheNode.ts | 15 +- src/database/core/view/Change.ts | 30 +- .../core/view/ChildChangeAccumulator.ts | 68 +- src/database/core/view/CompleteChildSource.ts | 51 +- src/database/core/view/Event.ts | 36 +- src/database/core/view/EventGenerator.ts | 96 +- src/database/core/view/EventQueue.ts | 16 +- src/database/core/view/EventRegistration.ts | 93 +- src/database/core/view/QueryParams.ts | 14 +- src/database/core/view/View.ts | 137 +- src/database/core/view/ViewCache.ts | 50 +- 
src/database/core/view/ViewProcessor.ts | 632 +++-- .../core/view/filter/IndexedFilter.ts | 79 +- .../core/view/filter/LimitedFilter.ts | 149 +- src/database/core/view/filter/NodeFilter.ts | 19 +- src/database/core/view/filter/RangedFilter.ts | 40 +- .../realtime/BrowserPollConnection.ts | 238 +- src/database/realtime/Connection.ts | 98 +- src/database/realtime/Transport.ts | 26 +- src/database/realtime/TransportManager.ts | 34 +- src/database/realtime/WebSocketConnection.ts | 66 +- .../realtime/polling/PacketReceiver.ts | 4 +- src/firebase-browser.ts | 2 +- src/firebase-node.ts | 15 +- src/firebase-react-native.ts | 8 +- src/messaging.ts | 10 +- .../controllers/controller-interface.ts | 49 +- src/messaging/controllers/sw-controller.ts | 225 +- .../controllers/window-controller.ts | 168 +- .../helpers/array-buffer-to-base64.ts | 6 +- src/messaging/models/db-interface.ts | 28 +- src/messaging/models/default-sw.ts | 2 +- src/messaging/models/errors.ts | 99 +- src/messaging/models/fcm-details.ts | 80 +- src/messaging/models/token-details-model.ts | 192 +- src/messaging/models/token-manager.ts | 269 +- src/messaging/models/vapid-details-model.ts | 132 +- src/messaging/models/worker-page-message.ts | 2 +- src/storage.ts | 40 +- src/storage/implementation/args.ts | 34 +- src/storage/implementation/authwrapper.ts | 83 +- src/storage/implementation/backoff.ts | 14 +- src/storage/implementation/blob.ts | 37 +- src/storage/implementation/blobbuilder.d.ts | 8 +- src/storage/implementation/error.ts | 171 +- src/storage/implementation/failrequest.ts | 4 +- src/storage/implementation/fs.ts | 8 +- src/storage/implementation/json.ts | 2 +- src/storage/implementation/location.ts | 21 +- src/storage/implementation/metadata.ts | 66 +- src/storage/implementation/object.ts | 12 +- src/storage/implementation/observer.ts | 26 +- src/storage/implementation/path.ts | 13 +- .../implementation/promise_external.ts | 11 +- src/storage/implementation/request.ts | 146 +- 
src/storage/implementation/requestinfo.ts | 26 +- src/storage/implementation/requestmaker.ts | 15 +- src/storage/implementation/requestmap.ts | 4 +- src/storage/implementation/requests.ts | 185 +- src/storage/implementation/string.ts | 73 +- src/storage/implementation/taskenums.ts | 6 +- src/storage/implementation/type.ts | 1 - src/storage/implementation/xhrio.ts | 11 +- src/storage/implementation/xhrio_network.ts | 25 +- src/storage/implementation/xhriopool.ts | 4 +- src/storage/metadata.ts | 42 +- src/storage/reference.ts | 80 +- src/storage/service.ts | 33 +- src/storage/task.ts | 357 +-- src/storage/tasksnapshot.ts | 21 +- src/utils/Sha1.ts | 74 +- src/utils/assert.ts | 9 +- src/utils/constants.ts | 2 +- src/utils/crypt.ts | 137 +- src/utils/deep_copy.ts | 36 +- src/utils/environment.ts | 22 +- src/utils/globalScope.ts | 18 +- src/utils/hash.ts | 8 +- src/utils/json.ts | 1 - src/utils/jwt.ts | 32 +- src/utils/nodePatches.ts | 107 +- src/utils/obj.ts | 16 +- src/utils/promise.ts | 7 +- src/utils/utf8.ts | 7 +- src/utils/util.ts | 11 +- src/utils/validation.ts | 66 +- tests/app/errors.test.ts | 77 +- tests/app/firebase_app.test.ts | 156 +- tests/app/subscribe.test.ts | 60 +- tests/database/browser/connection.test.ts | 62 +- .../database/browser/crawler_support.test.ts | 35 +- tests/database/compound_write.test.ts | 299 ++- tests/database/database.test.ts | 19 +- tests/database/datasnapshot.test.ts | 165 +- tests/database/helpers/EventAccumulator.ts | 15 +- tests/database/helpers/events.ts | 58 +- tests/database/helpers/util.ts | 59 +- tests/database/info.test.ts | 208 +- tests/database/node.test.ts | 309 ++- tests/database/node/connection.test.ts | 64 +- tests/database/order.test.ts | 356 +-- tests/database/order_by.test.ts | 95 +- tests/database/path.test.ts | 48 +- tests/database/promise.test.ts | 281 ++- tests/database/query.test.ts | 2158 ++++++++++------- tests/database/repoinfo.test.ts | 8 +- tests/database/sortedmap.test.ts | 333 +-- 
tests/database/sparsesnapshottree.test.ts | 186 +- tests/database/transaction.test.ts | 1416 ++++++----- .../browser/array-buffer-to-base64.test.ts | 93 +- tests/messaging/browser/constructor.test.ts | 16 +- tests/messaging/browser/db-helper.ts | 4 +- tests/messaging/browser/db-token-manager.ts | 14 +- tests/messaging/browser/delete-token.test.ts | 129 +- tests/messaging/browser/get-sw-reg.test.ts | 151 +- tests/messaging/browser/get-token.test.ts | 133 +- tests/messaging/browser/make-fake-app.ts | 2 +- .../browser/make-fake-subscription.ts | 29 +- tests/messaging/browser/make-fake-sw-reg.ts | 2 +- .../token-details-model-delete.test.ts | 122 +- .../browser/token-details-model-get.test.ts | 182 +- .../browser/token-details-model-save.test.ts | 155 +- .../token-manager-create-token.test.ts | 294 +-- .../token-manager-delete-token_test.test.ts | 102 +- .../token-manager-get-saved-token.test.ts | 314 +-- .../vapid-details-model-delete.test.ts | 75 +- .../browser/vapid-details-model-get.test.ts | 57 +- .../browser/vapid-details-model-save.test.ts | 59 +- .../binary/browser/binary_namespace.test.ts | 48 +- .../binary/node/binary_namespace.test.ts | 36 +- .../browser/messaging_namespace.test.ts | 28 +- tests/package/module_namespace.test.ts | 34 +- tests/package/utils/definitions/app.ts | 20 +- tests/package/utils/definitions/auth.ts | 30 +- tests/package/utils/definitions/database.ts | 32 +- tests/package/utils/definitions/firebase.ts | 51 +- tests/package/utils/definitions/messaging.ts | 40 +- tests/package/utils/definitions/storage.ts | 42 +- tests/package/utils/validator.ts | 27 +- tests/storage/browser/blob_test.ts | 39 +- tests/storage/browser/reference_test.ts | 399 +-- tests/storage/browser/request_test.ts | 176 +- tests/storage/browser/requests_test.ts | 517 ++-- tests/storage/browser/service_test.ts | 282 ++- tests/storage/browser/string_test.ts | 255 +- tests/storage/browser/task_test.ts | 486 ++-- tests/storage/browser/testshared.ts | 45 +- 
tests/storage/browser/xhrio.ts | 39 +- tests/utils/deep_copy.test.ts | 77 +- 213 files changed, 13292 insertions(+), 8459 deletions(-) diff --git a/src/app.ts b/src/app.ts index cfde5f16228..df7844be945 100644 --- a/src/app.ts +++ b/src/app.ts @@ -14,12 +14,9 @@ * limitations under the License. */ // Import the createFirebaseNamespace function -import { - createFirebaseNamespace, - FirebaseNamespace -} from './app/firebase_app'; +import { createFirebaseNamespace, FirebaseNamespace } from './app/firebase_app'; // Export a single instance of firebase app const firebase: FirebaseNamespace = createFirebaseNamespace(); -export default firebase; \ No newline at end of file +export default firebase; diff --git a/src/app/errors.ts b/src/app/errors.ts index ca642b474c4..45ef89dfaf6 100644 --- a/src/app/errors.ts +++ b/src/app/errors.ts @@ -53,7 +53,7 @@ * } * } */ -export type ErrorList = {[code: string]: string}; +export type ErrorList = { [code: string]: string }; const ERROR_NAME = 'FirebaseError'; @@ -61,8 +61,8 @@ export interface StringLike { toString: () => string; } -let captureStackTrace: (obj: Object, fn?: Function) => void = - (Error as any).captureStackTrace; +let captureStackTrace: (obj: Object, fn?: Function) => void = (Error as any) + .captureStackTrace; // Export for faking in tests export function patchCapture(captureFake?: any): any { @@ -73,24 +73,23 @@ export function patchCapture(captureFake?: any): any { export interface FirebaseError { // Unique code for error - format is service/error-code-string - code: string, + code: string; // Developer-friendly error message. 
- message: string, + message: string; // Always 'FirebaseError' - name: string, + name: string; // Where available - stack backtrace in a string - stack: string, + stack: string; } export class FirebaseError implements FirebaseError { public stack: string; public name: string; - constructor(public code: string, - public message: string) { + constructor(public code: string, public message: string) { let stack: string; // We want the stack value, if implemented by Error if (captureStackTrace) { @@ -116,31 +115,32 @@ FirebaseError.prototype.constructor = FirebaseError; export class ErrorFactory { // Matches {$name}, by default. - public pattern = /\{\$([^}]+)}/g + public pattern = /\{\$([^}]+)}/g; - constructor(private service: string, - private serviceName: string, - private errors: ErrorList) { + constructor( + private service: string, + private serviceName: string, + private errors: ErrorList + ) { // empty } - create(code: T, data?: {[prop: string]: StringLike}): FirebaseError { + create(code: T, data?: { [prop: string]: StringLike }): FirebaseError { if (data === undefined) { data = {}; } - let template = this.errors[(code as string)]; + let template = this.errors[code as string]; let fullCode = this.service + '/' + code; let message: string; if (template === undefined) { - message = "Error"; + message = 'Error'; } else { message = template.replace(this.pattern, (match, key) => { let value = data![key]; - return value !== undefined ? value.toString() - : '<' + key + '?>'; + return value !== undefined ? value.toString() : '<' + key + '?>'; }); } @@ -159,4 +159,4 @@ export class ErrorFactory { return err; } -} \ No newline at end of file +} diff --git a/src/app/firebase_app.ts b/src/app/firebase_app.ts index 292dce3122c..7d9d89fa7f5 100644 --- a/src/app/firebase_app.ts +++ b/src/app/firebase_app.ts @@ -13,35 +13,30 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { - createSubscribe, - Observer, - Subscribe -} from './subscribe'; -import { - ErrorFactory, - FirebaseError -} from './errors'; +import { createSubscribe, Observer, Subscribe } from './subscribe'; +import { ErrorFactory, FirebaseError } from './errors'; import { PromiseImpl } from '../utils/promise'; import { patchProperty, deepCopy, deepExtend } from '../utils/deep_copy'; -export interface FirebaseAuthTokenData { accessToken: string; } +export interface FirebaseAuthTokenData { + accessToken: string; +} export interface FirebaseAppInternals { - getToken(refreshToken?: boolean): Promise< FirebaseAuthTokenData | null >; - getUid(): string|null; - addAuthTokenListener(fn: (token: string|null) => void): void; - removeAuthTokenListener(fn: (token: string|null) => void): void; + getToken(refreshToken?: boolean): Promise; + getUid(): string | null; + addAuthTokenListener(fn: (token: string | null) => void): void; + removeAuthTokenListener(fn: (token: string | null) => void): void; } -export type FirebaseOptions = { - apiKey?: string, - authDomain?: string, - databaseURL?: string, - projectId?: string, - storageBucket?: string, - messagingSenderId?: string - [name: string]: any +export type FirebaseOptions = { + apiKey?: string; + authDomain?: string; + databaseURL?: string; + projectId?: string; + storageBucket?: string; + messagingSenderId?: string; + [name: string]: any; }; // An instance of the firebase.App @@ -88,23 +83,30 @@ export type AppHook = (event: string, app: FirebaseApp) => void; * function. 
*/ export interface FirebaseServiceFactory { - (app: FirebaseApp, extendApp?: (props: {[prop: string]: any}) => void, - instanceString?: string): FirebaseService; + ( + app: FirebaseApp, + extendApp?: (props: { [prop: string]: any }) => void, + instanceString?: string + ): FirebaseService; } /** * All ServiceNamespaces extend from FirebaseServiceNamespace */ -export interface FirebaseServiceNamespace { +export interface FirebaseServiceNamespace { (app?: FirebaseApp): T; } export interface FirebaseErrorFactory { - create(code: T, data?: {[prop: string]: any}): FirebaseError; + create(code: T, data?: { [prop: string]: any }): FirebaseError; } export interface FirebaseErrorFactoryClass { - new (service: string, serviceName: string, errors: {[code: string]: string}): FirebaseErrorFactory; + new ( + service: string, + serviceName: string, + errors: { [code: string]: string } + ): FirebaseErrorFactory; } export interface FirebaseNamespace { @@ -164,11 +166,12 @@ export interface FirebaseNamespace { * multiple instances per app. If not specified, the default is false. */ registerService( - name: string, - createService: FirebaseServiceFactory, - serviceProperties?: {[prop: string]: any}, - appHook?: AppHook, - allowMultipleInstances?: boolean): FirebaseServiceNamespace; + name: string, + createService: FirebaseServiceFactory, + serviceProperties?: { [prop: string]: any }, + appHook?: AppHook, + allowMultipleInstances?: boolean + ): FirebaseServiceNamespace; /** * Just used for testing to start from a fresh namespace. @@ -180,15 +183,16 @@ export interface FirebaseNamespace { * @prop props The top level properties of this object are copied to the * namespace. */ - extendNamespace(props: {[prop: string]: any}): void; + extendNamespace(props: { [prop: string]: any }): void; /** * Create a Subscribe function. A proxy Observer is created so that * events can be sent to single Observer to be fanned out automatically. 
*/ createSubscribe( - executor: (observer: Observer) => void, - onNoObservers?: (observer: Observer) => void): Subscribe; + executor: (observer: Observer) => void, + onNoObservers?: (observer: Observer) => void + ): Subscribe; /** * Utility exposed for internal testing. @@ -203,7 +207,7 @@ export interface FirebaseNamespace { /** * Service factories for each registered service. */ - factories: {[name: string]: FirebaseServiceFactory}; + factories: { [name: string]: FirebaseServiceFactory }; /* * Convert service name to factory name to use. @@ -214,7 +218,7 @@ export interface FirebaseNamespace { * Use to construct all thrown FirebaseError's. */ ErrorFactory: FirebaseErrorFactoryClass; - } + }; } const contains = function(obj, key) { @@ -237,28 +241,32 @@ class FirebaseAppImpl implements FirebaseApp { private isDeleted_ = false; private services_: { [name: string]: { - [serviceName: string]: FirebaseService - } + [serviceName: string]: FirebaseService; + }; } = {}; public INTERNAL; - constructor(options: FirebaseOptions, - name: string, - private firebase_: FirebaseNamespace) { + constructor( + options: FirebaseOptions, + name: string, + private firebase_: FirebaseNamespace + ) { this.name_ = name; this.options_ = deepCopy(options); this.INTERNAL = { - 'getUid': () => null, - 'getToken': () => PromiseImpl.resolve(null), - 'addAuthTokenListener': (callback: (token: string|null) => void) => { + getUid: () => null, + getToken: () => PromiseImpl.resolve(null), + addAuthTokenListener: (callback: (token: string | null) => void) => { tokenListeners.push(callback); // Make sure callback is called, asynchronously, in the absence of the auth module setTimeout(() => callback(null), 0); }, - 'removeAuthTokenListener': (callback) => { - tokenListeners = tokenListeners.filter(listener => listener !== callback); - }, + removeAuthTokenListener: callback => { + tokenListeners = tokenListeners.filter( + listener => listener !== callback + ); + } }; } @@ -273,21 +281,23 @@ class 
FirebaseAppImpl implements FirebaseApp { } delete(): Promise { - return new PromiseImpl((resolve) => { + return new PromiseImpl(resolve => { this.checkDestroyed_(); resolve(); }) .then(() => { this.firebase_.INTERNAL.removeApp(this.name_); let services: FirebaseService[] = []; - Object.keys(this.services_).forEach((serviceKey) => { - Object.keys(this.services_[serviceKey]).forEach((instanceKey) => { + Object.keys(this.services_).forEach(serviceKey => { + Object.keys(this.services_[serviceKey]).forEach(instanceKey => { services.push(this.services_[serviceKey][instanceKey]); }); }); - return PromiseImpl.all(services.map((service) => { - return service.INTERNAL!.delete(); - })); + return PromiseImpl.all( + services.map(service => { + return service.INTERNAL!.delete(); + }) + ); }) .then((): void => { this.isDeleted_ = true; @@ -309,7 +319,10 @@ class FirebaseAppImpl implements FirebaseApp { * The service name is passed to this already * @internal */ - _getService(name: string, instanceIdentifier: string = DEFAULT_ENTRY_NAME): FirebaseService { + _getService( + name: string, + instanceIdentifier: string = DEFAULT_ENTRY_NAME + ): FirebaseService { this.checkDestroyed_(); if (!this.services_[name]) { @@ -321,8 +334,15 @@ class FirebaseAppImpl implements FirebaseApp { * If a custom instance has been defined (i.e. not '[DEFAULT]') * then we will pass that instance on, otherwise we pass `null` */ - const instanceSpecifier = instanceIdentifier !== DEFAULT_ENTRY_NAME ? instanceIdentifier : undefined; - const service = this.firebase_.INTERNAL.factories[name](this, this.extendApp.bind(this), instanceSpecifier); + const instanceSpecifier = + instanceIdentifier !== DEFAULT_ENTRY_NAME + ? 
instanceIdentifier + : undefined; + const service = this.firebase_.INTERNAL.factories[name]( + this, + this.extendApp.bind(this), + instanceSpecifier + ); this.services_[name][instanceIdentifier] = service; } @@ -333,7 +353,7 @@ class FirebaseAppImpl implements FirebaseApp { * Callback function used to extend an App instance at the time * of service instance creation. */ - private extendApp(props: {[name: string]: any}): void { + private extendApp(props: { [name: string]: any }): void { // Copy the object onto the FirebaseAppImpl prototype deepExtend(this, props); @@ -360,17 +380,16 @@ class FirebaseAppImpl implements FirebaseApp { */ private checkDestroyed_(): void { if (this.isDeleted_) { - error('app-deleted', {'name': this.name_}); + error('app-deleted', { name: this.name_ }); } } -}; +} // Prevent dead-code elimination of these methods w/o invalid property // copying. -FirebaseAppImpl.prototype.name && - FirebaseAppImpl.prototype.options || +(FirebaseAppImpl.prototype.name && FirebaseAppImpl.prototype.options) || FirebaseAppImpl.prototype.delete || - console.log("dc"); + console.log('dc'); /** * Return a firebase namespace object. @@ -380,31 +399,31 @@ FirebaseAppImpl.prototype.name && * in unit tests. */ export function createFirebaseNamespace(): FirebaseNamespace { - let apps_: {[name: string]: FirebaseApp} = {}; - let factories: {[service: string]: FirebaseServiceFactory} = {}; - let appHooks: {[service: string]: AppHook} = {}; + let apps_: { [name: string]: FirebaseApp } = {}; + let factories: { [service: string]: FirebaseServiceFactory } = {}; + let appHooks: { [service: string]: AppHook } = {}; // A namespace is a plain JavaScript Object. let namespace = { // Hack to prevent Babel from modifying the object returned // as the firebase namespace. 
- '__esModule': true, - 'initializeApp': initializeApp, - 'app': app as any, - 'apps': null as any, - 'Promise': PromiseImpl, - 'SDK_VERSION': '${JSCORE_VERSION}', - 'INTERNAL': { - 'registerService': registerService, - 'createFirebaseNamespace': createFirebaseNamespace, - 'extendNamespace': extendNamespace, - 'createSubscribe': createSubscribe, - 'ErrorFactory': ErrorFactory, - 'removeApp': removeApp, - 'factories': factories, - 'useAsService': useAsService, - 'Promise': PromiseImpl, - 'deepExtend': deepExtend, + __esModule: true, + initializeApp: initializeApp, + app: app as any, + apps: null as any, + Promise: PromiseImpl, + SDK_VERSION: '${JSCORE_VERSION}', + INTERNAL: { + registerService: registerService, + createFirebaseNamespace: createFirebaseNamespace, + extendNamespace: extendNamespace, + createSubscribe: createSubscribe, + ErrorFactory: ErrorFactory, + removeApp: removeApp, + factories: factories, + useAsService: useAsService, + Promise: PromiseImpl, + deepExtend: deepExtend } }; @@ -441,7 +460,7 @@ export function createFirebaseNamespace(): FirebaseNamespace { function app(name?: string): FirebaseApp { name = name || DEFAULT_ENTRY_NAME; if (!contains(apps_, name)) { - error('no-app', {'name': name}); + error('no-app', { name: name }); } return apps_[name]; } @@ -456,14 +475,18 @@ export function createFirebaseNamespace(): FirebaseNamespace { name = DEFAULT_ENTRY_NAME; } else { if (typeof name !== 'string' || name === '') { - error('bad-app-name', {'name': name + ''}); + error('bad-app-name', { name: name + '' }); } } - if (contains(apps_, name) ) { - error('duplicate-app', {'name': name}); + if (contains(apps_, name)) { + error('duplicate-app', { name: name }); } - let app = new FirebaseAppImpl(options, name!, namespace as FirebaseNamespace); + let app = new FirebaseAppImpl( + options, + name!, + namespace as FirebaseNamespace + ); apps_[name!] 
= app; callAppHooks(app, 'create'); @@ -476,7 +499,7 @@ export function createFirebaseNamespace(): FirebaseNamespace { */ function getApps(): FirebaseApp[] { // Make a copy so caller cannot mutate the apps list. - return Object.keys(apps_).map((name) => apps_[name]); + return Object.keys(apps_).map(name => apps_[name]); } /* @@ -487,20 +510,20 @@ export function createFirebaseNamespace(): FirebaseNamespace { * TODO: Implement serviceProperties. */ function registerService( - name: string, - createService: FirebaseServiceFactory, - serviceProperties?: {[prop: string]: any}, - appHook?: AppHook, - allowMultipleInstances?: boolean): - FirebaseServiceNamespace { + name: string, + createService: FirebaseServiceFactory, + serviceProperties?: { [prop: string]: any }, + appHook?: AppHook, + allowMultipleInstances?: boolean + ): FirebaseServiceNamespace { // Cannot re-register a service that already exists if (factories[name]) { - error('duplicate-service', {'name': name}); + error('duplicate-service', { name: name }); } // Capture the service factory for later service instantiation factories[name] = createService; - + // Capture the appHook, if passed if (appHook) { appHooks[name] = appHook; @@ -513,10 +536,10 @@ export function createFirebaseNamespace(): FirebaseNamespace { // The Service namespace is an accessor function ... const serviceNamespace = (appArg: FirebaseApp = app()) => { - if (typeof(appArg as any)[name] !== 'function') { + if (typeof (appArg as any)[name] !== 'function') { // Invalid argument. // This happens in the following case: firebase.storage('gs:/') - error('invalid-app-argument', {'name': name}); + error('invalid-app-argument', { name: name }); } // Forward service instance lookup to the FirebaseApp. @@ -535,7 +558,7 @@ export function createFirebaseNamespace(): FirebaseNamespace { FirebaseAppImpl.prototype[name] = function(...args) { const serviceFxn = this._getService.bind(this, name); return serviceFxn.apply(this, allowMultipleInstances ? 
args : []); - } + }; return serviceNamespace; } @@ -545,12 +568,12 @@ export function createFirebaseNamespace(): FirebaseNamespace { * * firebase.INTERNAL.extendNamespace() */ - function extendNamespace(props: {[prop: string]: any}): void { + function extendNamespace(props: { [prop: string]: any }): void { deepExtend(namespace, props); } function callAppHooks(app: FirebaseApp, eventName: string) { - Object.keys(factories).forEach((serviceName) => { + Object.keys(factories).forEach(serviceName => { // Ignore virtual services let factoryName = useAsService(app, serviceName); if (factoryName === null) { @@ -579,28 +602,37 @@ export function createFirebaseNamespace(): FirebaseNamespace { return (namespace as any) as FirebaseNamespace; } -type AppError = 'no-app'|'bad-app-name'|'duplicate-app'|'app-deleted'| - 'duplicate-service'|'sa-not-supported'|'invalid-app-argument'; +type AppError = + | 'no-app' + | 'bad-app-name' + | 'duplicate-app' + | 'app-deleted' + | 'duplicate-service' + | 'sa-not-supported' + | 'invalid-app-argument'; -function error(code: AppError, args?: {[name: string]: any}) { +function error(code: AppError, args?: { [name: string]: any }) { throw appErrors.create(code, args); } // TypeScript does not support non-string indexes! // let errors: {[code: AppError: string} = { -let errors: {[code: string]: string} = { - 'no-app': 'No Firebase App \'{$name}\' has been created - ' + - 'call Firebase App.initializeApp()', - 'bad-app-name': 'Illegal App name: \'{$name}', - 'duplicate-app': 'Firebase App named \'{$name}\' already exists', - 'app-deleted': 'Firebase App named \'{$name}\' already deleted', - 'duplicate-service': 'Firebase service named \'{$name}\' already registered', - 'sa-not-supported': 'Initializing the Firebase SDK with a service ' + - 'account is only allowed in a Node.js environment. 
On client ' + - 'devices, you should instead initialize the SDK with an api key and ' + - 'auth domain', - 'invalid-app-argument': 'firebase.{$name}() takes either no argument or a ' + - 'Firebase App instance.' +let errors: { [code: string]: string } = { + 'no-app': + "No Firebase App '{$name}' has been created - " + + 'call Firebase App.initializeApp()', + 'bad-app-name': "Illegal App name: '{$name}", + 'duplicate-app': "Firebase App named '{$name}' already exists", + 'app-deleted': "Firebase App named '{$name}' already deleted", + 'duplicate-service': "Firebase service named '{$name}' already registered", + 'sa-not-supported': + 'Initializing the Firebase SDK with a service ' + + 'account is only allowed in a Node.js environment. On client ' + + 'devices, you should instead initialize the SDK with an api key and ' + + 'auth domain', + 'invalid-app-argument': + 'firebase.{$name}() takes either no argument or a ' + + 'Firebase App instance.' }; let appErrors = new ErrorFactory('app', 'Firebase', errors); diff --git a/src/app/subscribe.ts b/src/app/subscribe.ts index d44b7e9869f..c7d3b607fc4 100644 --- a/src/app/subscribe.ts +++ b/src/app/subscribe.ts @@ -64,9 +64,10 @@ export type Executor = (observer: Observer) => void; * as a proxy. * @param onNoObservers Callback when count of Observers goes to zero. */ -export function createSubscribe(executor: Executor, - onNoObservers?: Executor) -: Subscribe { +export function createSubscribe( + executor: Executor, + onNoObservers?: Executor +): Subscribe { let proxy = new ObserverProxy(executor, onNoObservers); return proxy.subscribe.bind(proxy); } @@ -75,10 +76,10 @@ export function createSubscribe(executor: Executor, * Implement fan-out for any number of Observers attached via a subscribe * function. 
*/ -class ObserverProxy implements Observer{ - private observers: Array>|undefined = []; +class ObserverProxy implements Observer { + private observers: Array> | undefined = []; private unsubscribes: Unsubscribe[] = []; - private onNoObservers: Executor|undefined; + private onNoObservers: Executor | undefined; private observerCount = 0; // Micro-task scheduling by calling task.then(). private task = PromiseImpl.resolve(); @@ -99,7 +100,7 @@ class ObserverProxy implements Observer{ .then(() => { executor(this); }) - .catch((e) => { + .catch(e => { this.error(e); }); } @@ -130,15 +131,19 @@ class ObserverProxy implements Observer{ * - We require that no event is sent to a subscriber sychronously to their * call to subscribe(). */ - subscribe(nextOrObserver: PartialObserver | Function, - error?: ErrorFn, - complete?: CompleteFn) - : Unsubscribe { + subscribe( + nextOrObserver: PartialObserver | Function, + error?: ErrorFn, + complete?: CompleteFn + ): Unsubscribe { let observer: Observer; - if (nextOrObserver === undefined && error === undefined && - complete === undefined) { - throw new Error("Missing Observer."); + if ( + nextOrObserver === undefined && + error === undefined && + complete === undefined + ) { + throw new Error('Missing Observer.'); } // Assemble an Observer object when passed as callback functions. @@ -148,7 +153,7 @@ class ObserverProxy implements Observer{ observer = { next: (nextOrObserver as any) as NextFn, error: error, - complete: complete, + complete: complete } as Observer; } @@ -191,7 +196,7 @@ class ObserverProxy implements Observer{ // any unsubscribed Observer. private unsubscribeOne(i: number) { if (this.observers === undefined || this.observers[i] === undefined) { - return; + return; } delete this.observers[i]; @@ -228,7 +233,7 @@ class ObserverProxy implements Observer{ // Ignore exceptions raised in Observers or missing methods of an // Observer. // Log error to console. 
b/31404806 - if (typeof console !== "undefined" && console.error) { + if (typeof console !== 'undefined' && console.error) { console.error(e); } } @@ -271,7 +276,7 @@ export function async(fn: Function, onError?: ErrorFn): Function { * Return true if the object passed in implements any of the named methods. */ function implementsAnyMethods(obj: any, methods: string[]): boolean { - if (typeof(obj) !== 'object' || obj === null) { + if (typeof obj !== 'object' || obj === null) { return false; } @@ -286,4 +291,4 @@ function implementsAnyMethods(obj: any, methods: string[]): boolean { function noop(): void { // do nothing -} \ No newline at end of file +} diff --git a/src/database.ts b/src/database.ts index 24705b42143..1f394735a5d 100644 --- a/src/database.ts +++ b/src/database.ts @@ -15,15 +15,15 @@ */ import firebase from './app'; -import { FirebaseApp, FirebaseNamespace } from "./app/firebase_app"; -import { Database } from "./database/api/Database"; -import { Query } from "./database/api/Query"; -import { Reference } from "./database/api/Reference"; -import { enableLogging } from "./database/core/util/util"; -import { RepoManager } from "./database/core/RepoManager"; +import { FirebaseApp, FirebaseNamespace } from './app/firebase_app'; +import { Database } from './database/api/Database'; +import { Query } from './database/api/Query'; +import { Reference } from './database/api/Reference'; +import { enableLogging } from './database/core/util/util'; +import { RepoManager } from './database/core/RepoManager'; import * as INTERNAL from './database/api/internal'; import * as TEST_ACCESS from './database/api/test_access'; -import { isNodeSdk } from "./utils/environment"; +import { isNodeSdk } from './utils/environment'; export function registerDatabase(instance: FirebaseNamespace) { // Register the Database Service with the 'firebase' namespace. 
@@ -52,21 +52,21 @@ export function registerDatabase(instance: FirebaseNamespace) { */ declare module './app/firebase_app' { interface FirebaseApp { - database?(): Database + database?(): Database; } } declare module './app/firebase_app' { interface FirebaseNamespace { database?: { - (app?: FirebaseApp): Database, - Database, - enableLogging, - INTERNAL, - Query, - Reference, - ServerValue, - } + (app?: FirebaseApp): Database; + Database; + enableLogging; + INTERNAL; + Query; + Reference; + ServerValue; + }; } } diff --git a/src/database/api/DataSnapshot.ts b/src/database/api/DataSnapshot.ts index 9d5c4d5cf26..c189d729f31 100644 --- a/src/database/api/DataSnapshot.ts +++ b/src/database/api/DataSnapshot.ts @@ -33,10 +33,11 @@ export class DataSnapshot { * @param {!Reference} ref_ The ref of the location this snapshot came from. * @param {!Index} index_ The iteration order for this snapshot */ - constructor(private readonly node_: Node, - private readonly ref_: Reference, - private readonly index_: Index) { - } + constructor( + private readonly node_: Node, + private readonly ref_: Reference, + private readonly index_: Index + ) {} /** * Retrieves the snapshot contents as JSON. 
Returns null if the snapshot is @@ -91,7 +92,11 @@ export class DataSnapshot { const childPath = new Path(childPathString); const childRef = this.ref_.child(childPath); - return new DataSnapshot(this.node_.getChild(childPath), childRef, PRIORITY_INDEX); + return new DataSnapshot( + this.node_.getChild(childPath), + childRef, + PRIORITY_INDEX + ); } /** @@ -117,7 +122,7 @@ export class DataSnapshot { validateArgCount('DataSnapshot.getPriority', 0, 0, arguments.length); // typecast here because we never return deferred values or internal priorities (MAX_PRIORITY) - return (this.node_.getPriority().val() as string | number | null); + return this.node_.getPriority().val() as string | number | null; } /** @@ -132,13 +137,14 @@ export class DataSnapshot { validateArgCount('DataSnapshot.forEach', 1, 1, arguments.length); validateCallback('DataSnapshot.forEach', 1, action, false); - if (this.node_.isLeafNode()) - return false; + if (this.node_.isLeafNode()) return false; - const childrenNode = (this.node_ as ChildrenNode); + const childrenNode = this.node_ as ChildrenNode; // Sanitize the return value to a boolean. ChildrenNode.forEachChild has a weird return type... 
return !!childrenNode.forEachChild(this.index_, (key, node) => { - return action(new DataSnapshot(node, this.ref_.child(key), PRIORITY_INDEX)); + return action( + new DataSnapshot(node, this.ref_.child(key), PRIORITY_INDEX) + ); }); } @@ -149,10 +155,8 @@ export class DataSnapshot { hasChildren(): boolean { validateArgCount('DataSnapshot.hasChildren', 0, 0, arguments.length); - if (this.node_.isLeafNode()) - return false; - else - return !this.node_.isEmpty(); + if (this.node_.isLeafNode()) return false; + else return !this.node_.isEmpty(); } get key() { diff --git a/src/database/api/Database.ts b/src/database/api/Database.ts index 0e6a4a0dd74..fef2cdc804e 100644 --- a/src/database/api/Database.ts +++ b/src/database/api/Database.ts @@ -35,7 +35,7 @@ export class Database implements FirebaseService { private root_: Reference; static readonly ServerValue = { - 'TIMESTAMP': { + TIMESTAMP: { '.sv': 'timestamp' } }; @@ -46,7 +46,9 @@ export class Database implements FirebaseService { */ constructor(private repo_: Repo) { if (!(repo_ instanceof Repo)) { - fatal('Don\'t call new Database() directly - please use firebase.database().'); + fatal( + "Don't call new Database() directly - please use firebase.database()." 
+ ); } /** @type {Reference} */ @@ -88,9 +90,15 @@ export class Database implements FirebaseService { const repoInfo = parsedURL.repoInfo; if (repoInfo.host !== ((this.repo_ as any).repoInfo_ as RepoInfo).host) { - fatal(apiName + ': Host name does not match the current database: ' + - '(found ' + repoInfo.host + ' but expected ' + - ((this.repo_ as any).repoInfo_ as RepoInfo).host + ')'); + fatal( + apiName + + ': Host name does not match the current database: ' + + '(found ' + + repoInfo.host + + ' but expected ' + + ((this.repo_ as any).repoInfo_ as RepoInfo).host + + ')' + ); } return this.ref(parsedURL.path.toString()); @@ -121,8 +129,7 @@ export class Database implements FirebaseService { export class DatabaseInternals { /** @param {!Database} database */ - constructor(public database: Database) { - } + constructor(public database: Database) {} /** @return {Promise} */ delete(): Promise { @@ -136,4 +143,3 @@ export class DatabaseInternals { return PromiseImpl.resolve(); } } - diff --git a/src/database/api/Query.ts b/src/database/api/Query.ts index a7d76ec6ab1..69b87f08c15 100644 --- a/src/database/api/Query.ts +++ b/src/database/api/Query.ts @@ -26,20 +26,29 @@ import { validateEventType, validatePathString, validateFirebaseDataArg, - validateKey, + validateKey } from '../core/util/validation'; -import { errorPrefix, validateArgCount, validateCallback, validateContextObject } from '../../utils/validation'; -import { ValueEventRegistration, ChildEventRegistration, EventRegistration } from '../core/view/EventRegistration'; +import { + errorPrefix, + validateArgCount, + validateCallback, + validateContextObject +} from '../../utils/validation'; +import { + ValueEventRegistration, + ChildEventRegistration, + EventRegistration +} from '../core/view/EventRegistration'; import { Deferred, attachDummyErrorHandler } from '../../utils/promise'; import { Repo } from '../core/Repo'; import { QueryParams } from '../core/view/QueryParams'; import { Reference } from 
'./Reference'; import { DataSnapshot } from './DataSnapshot'; -let __referenceConstructor: new(repo: Repo, path: Path) => Query; +let __referenceConstructor: new (repo: Repo, path: Path) => Query; export interface SnapshotCallback { - (a: DataSnapshot, b?: string): any + (a: DataSnapshot, b?: string): any; } /** @@ -58,7 +67,12 @@ export class Query { return __referenceConstructor; } - constructor(public repo: Repo, public path: Path, private queryParams_: QueryParams, private orderByCalled_: boolean) {} + constructor( + public repo: Repo, + public path: Path, + private queryParams_: QueryParams, + private orderByCalled_: boolean + ) {} /** * Validates start/end values for queries. @@ -76,15 +90,17 @@ export class Query { } if (params.getIndex() === KEY_INDEX) { - const tooManyArgsError = 'Query: When ordering by key, you may only pass one argument to ' + + const tooManyArgsError = + 'Query: When ordering by key, you may only pass one argument to ' + 'startAt(), endAt(), or equalTo().'; - const wrongArgTypeError = 'Query: When ordering by key, the argument passed to startAt(), endAt(),' + + const wrongArgTypeError = + 'Query: When ordering by key, the argument passed to startAt(), endAt(),' + 'or equalTo() must be a string.'; if (params.hasStart()) { const startName = params.getIndexStartName(); if (startName != MIN_NAME) { throw new Error(tooManyArgsError); - } else if (typeof(startNode) !== 'string') { + } else if (typeof startNode !== 'string') { throw new Error(wrongArgTypeError); } } @@ -92,24 +108,34 @@ export class Query { const endName = params.getIndexEndName(); if (endName != MAX_NAME) { throw new Error(tooManyArgsError); - } else if (typeof(endNode) !== 'string') { + } else if (typeof endNode !== 'string') { throw new Error(wrongArgTypeError); } } - } - else if (params.getIndex() === PRIORITY_INDEX) { - if ((startNode != null && !isValidPriority(startNode)) || - (endNode != null && !isValidPriority(endNode))) { - throw new Error('Query: When ordering by 
priority, the first argument passed to startAt(), ' + - 'endAt(), or equalTo() must be a valid priority value (null, a number, or a string).'); + } else if (params.getIndex() === PRIORITY_INDEX) { + if ( + (startNode != null && !isValidPriority(startNode)) || + (endNode != null && !isValidPriority(endNode)) + ) { + throw new Error( + 'Query: When ordering by priority, the first argument passed to startAt(), ' + + 'endAt(), or equalTo() must be a valid priority value (null, a number, or a string).' + ); } } else { - assert((params.getIndex() instanceof PathIndex) || - (params.getIndex() === VALUE_INDEX), 'unknown index type.'); - if ((startNode != null && typeof startNode === 'object') || - (endNode != null && typeof endNode === 'object')) { - throw new Error('Query: First argument passed to startAt(), endAt(), or equalTo() cannot be ' + - 'an object.'); + assert( + params.getIndex() instanceof PathIndex || + params.getIndex() === VALUE_INDEX, + 'unknown index type.' + ); + if ( + (startNode != null && typeof startNode === 'object') || + (endNode != null && typeof endNode === 'object') + ) { + throw new Error( + 'Query: First argument passed to startAt(), endAt(), or equalTo() cannot be ' + + 'an object.' + ); } } } @@ -120,9 +146,14 @@ export class Query { * @private */ private static validateLimit_(params: QueryParams) { - if (params.hasStart() && params.hasEnd() && params.hasLimit() && !params.hasAnchoredLimit()) { + if ( + params.hasStart() && + params.hasEnd() && + params.hasLimit() && + !params.hasAnchoredLimit() + ) { throw new Error( - 'Query: Can\'t combine startAt(), endAt(), and limit(). Use limitToFirst() or limitToLast() instead.' + "Query: Can't combine startAt(), endAt(), and limit(). Use limitToFirst() or limitToLast() instead." 
); } } @@ -134,7 +165,7 @@ export class Query { */ private validateNoPreviousOrderByCall_(fnName: string) { if (this.orderByCalled_ === true) { - throw new Error(fnName + ': You can\'t combine multiple orderBy calls.'); + throw new Error(fnName + ": You can't combine multiple orderBy calls."); } } @@ -153,7 +184,7 @@ export class Query { // This is a slight hack. We cannot goog.require('fb.api.Firebase'), since Firebase requires fb.api.Query. // However, we will always export 'Firebase' to the global namespace, so it's guaranteed to exist by the time this // method gets called. - return (new Query.__referenceConstructor(this.repo, this.path) as Reference); + return new Query.__referenceConstructor(this.repo, this.path) as Reference; } /** @@ -163,13 +194,21 @@ export class Query { * @param {Object=} context * @return {!function(DataSnapshot, string=)} */ - on(eventType: string, callback: SnapshotCallback, - cancelCallbackOrContext?: ((a: Error) => any) | Object, context?: Object): SnapshotCallback { + on( + eventType: string, + callback: SnapshotCallback, + cancelCallbackOrContext?: ((a: Error) => any) | Object, + context?: Object + ): SnapshotCallback { validateArgCount('Query.on', 2, 4, arguments.length); validateEventType('Query.on', 1, eventType, false); validateCallback('Query.on', 2, callback, false); - const ret = Query.getCancelAndContextArgs_('Query.on', cancelCallbackOrContext, context); + const ret = Query.getCancelAndContextArgs_( + 'Query.on', + cancelCallbackOrContext, + context + ); if (eventType === 'value') { this.onValueEvent(callback, ret.cancel, ret.context); @@ -187,8 +226,16 @@ export class Query { * @param {?Object} context * @protected */ - protected onValueEvent(callback: (a: DataSnapshot) => void, cancelCallback: ((a: Error) => void) | null, context: Object | null) { - const container = new ValueEventRegistration(callback, cancelCallback || null, context || null); + protected onValueEvent( + callback: (a: DataSnapshot) => void, + 
cancelCallback: ((a: Error) => void) | null, + context: Object | null + ) { + const container = new ValueEventRegistration( + callback, + cancelCallback || null, + context || null + ); this.repo.addEventCallbackForQuery(this, container); } @@ -198,9 +245,16 @@ export class Query { * @param {?Object} context * @protected */ - onChildEvent(callbacks: { [k: string]: SnapshotCallback }, - cancelCallback: ((a: Error) => any) | null, context: Object | null) { - const container = new ChildEventRegistration(callbacks, cancelCallback, context); + onChildEvent( + callbacks: { [k: string]: SnapshotCallback }, + cancelCallback: ((a: Error) => any) | null, + context: Object | null + ) { + const container = new ChildEventRegistration( + callbacks, + cancelCallback, + context + ); this.repo.addEventCallbackForQuery(this, container); } @@ -219,7 +273,11 @@ export class Query { let callbacks: { [k: string]: typeof callback } | null = null; if (eventType === 'value') { const valueCallback = callback || null; - container = new ValueEventRegistration(valueCallback, null, context || null); + container = new ValueEventRegistration( + valueCallback, + null, + context || null + ); } else if (eventType) { if (callback) { callbacks = {}; @@ -238,15 +296,21 @@ export class Query { * @param context * @return {!firebase.Promise} */ - once(eventType: string, - userCallback?: SnapshotCallback, - cancelOrContext?: ((a: Error) => void) | Object, - context?: Object): Promise { + once( + eventType: string, + userCallback?: SnapshotCallback, + cancelOrContext?: ((a: Error) => void) | Object, + context?: Object + ): Promise { validateArgCount('Query.once', 1, 4, arguments.length); validateEventType('Query.once', 1, eventType, false); validateCallback('Query.once', 2, userCallback, true); - const ret = Query.getCancelAndContextArgs_('Query.once', cancelOrContext, context); + const ret = Query.getCancelAndContextArgs_( + 'Query.once', + cancelOrContext, + context + ); // TODO: Implement this more 
efficiently (in particular, use 'get' wire protocol for 'value' event) // TODO: consider actually wiring the callbacks into the promise. We cannot do this without a breaking change @@ -270,13 +334,16 @@ export class Query { } }; - this.on(eventType, onceCallback, /*cancel=*/ (err) => { - this.off(eventType, onceCallback); + this.on( + eventType, + onceCallback, + /*cancel=*/ err => { + this.off(eventType, onceCallback); - if (ret.cancel) - ret.cancel.bind(ret.context)(err); - deferred.reject(err); - }); + if (ret.cancel) ret.cancel.bind(ret.context)(err); + deferred.reject(err); + } + ); return deferred.promise; } @@ -287,15 +354,28 @@ export class Query { */ limitToFirst(limit: number): Query { validateArgCount('Query.limitToFirst', 1, 1, arguments.length); - if (typeof limit !== 'number' || Math.floor(limit) !== limit || limit <= 0) { - throw new Error('Query.limitToFirst: First argument must be a positive integer.'); + if ( + typeof limit !== 'number' || + Math.floor(limit) !== limit || + limit <= 0 + ) { + throw new Error( + 'Query.limitToFirst: First argument must be a positive integer.' + ); } if (this.queryParams_.hasLimit()) { - throw new Error('Query.limitToFirst: Limit was already set (by another call to limit, ' + - 'limitToFirst, or limitToLast).'); + throw new Error( + 'Query.limitToFirst: Limit was already set (by another call to limit, ' + + 'limitToFirst, or limitToLast).' 
+ ); } - return new Query(this.repo, this.path, this.queryParams_.limitToFirst(limit), this.orderByCalled_); + return new Query( + this.repo, + this.path, + this.queryParams_.limitToFirst(limit), + this.orderByCalled_ + ); } /** @@ -305,16 +385,28 @@ export class Query { */ limitToLast(limit: number): Query { validateArgCount('Query.limitToLast', 1, 1, arguments.length); - if (typeof limit !== 'number' || Math.floor(limit) !== limit || limit <= 0) { - throw new Error('Query.limitToLast: First argument must be a positive integer.'); + if ( + typeof limit !== 'number' || + Math.floor(limit) !== limit || + limit <= 0 + ) { + throw new Error( + 'Query.limitToLast: First argument must be a positive integer.' + ); } if (this.queryParams_.hasLimit()) { - throw new Error('Query.limitToLast: Limit was already set (by another call to limit, ' + - 'limitToFirst, or limitToLast).'); + throw new Error( + 'Query.limitToLast: Limit was already set (by another call to limit, ' + + 'limitToFirst, or limitToLast).' + ); } - return new Query(this.repo, this.path, this.queryParams_.limitToLast(limit), - this.orderByCalled_); + return new Query( + this.repo, + this.path, + this.queryParams_.limitToLast(limit), + this.orderByCalled_ + ); } /** @@ -325,23 +417,31 @@ export class Query { orderByChild(path: string): Query { validateArgCount('Query.orderByChild', 1, 1, arguments.length); if (path === '$key') { - throw new Error('Query.orderByChild: "$key" is invalid. Use Query.orderByKey() instead.'); + throw new Error( + 'Query.orderByChild: "$key" is invalid. Use Query.orderByKey() instead.' + ); } else if (path === '$priority') { - throw new Error('Query.orderByChild: "$priority" is invalid. Use Query.orderByPriority() instead.'); + throw new Error( + 'Query.orderByChild: "$priority" is invalid. Use Query.orderByPriority() instead.' + ); } else if (path === '$value') { - throw new Error('Query.orderByChild: "$value" is invalid. 
Use Query.orderByValue() instead.'); + throw new Error( + 'Query.orderByChild: "$value" is invalid. Use Query.orderByValue() instead.' + ); } validatePathString('Query.orderByChild', 1, path, false); this.validateNoPreviousOrderByCall_('Query.orderByChild'); const parsedPath = new Path(path); if (parsedPath.isEmpty()) { - throw new Error('Query.orderByChild: cannot pass in empty path. Use Query.orderByValue() instead.'); + throw new Error( + 'Query.orderByChild: cannot pass in empty path. Use Query.orderByValue() instead.' + ); } const index = new PathIndex(parsedPath); const newParams = this.queryParams_.orderBy(index); Query.validateQueryEndpoints_(newParams); - return new Query(this.repo, this.path, newParams, /*orderByCalled=*/true); + return new Query(this.repo, this.path, newParams, /*orderByCalled=*/ true); } /** @@ -353,7 +453,7 @@ export class Query { this.validateNoPreviousOrderByCall_('Query.orderByKey'); const newParams = this.queryParams_.orderBy(KEY_INDEX); Query.validateQueryEndpoints_(newParams); - return new Query(this.repo, this.path, newParams, /*orderByCalled=*/true); + return new Query(this.repo, this.path, newParams, /*orderByCalled=*/ true); } /** @@ -365,7 +465,7 @@ export class Query { this.validateNoPreviousOrderByCall_('Query.orderByPriority'); const newParams = this.queryParams_.orderBy(PRIORITY_INDEX); Query.validateQueryEndpoints_(newParams); - return new Query(this.repo, this.path, newParams, /*orderByCalled=*/true); + return new Query(this.repo, this.path, newParams, /*orderByCalled=*/ true); } /** @@ -377,7 +477,7 @@ export class Query { this.validateNoPreviousOrderByCall_('Query.orderByValue'); const newParams = this.queryParams_.orderBy(VALUE_INDEX); Query.validateQueryEndpoints_(newParams); - return new Query(this.repo, this.path, newParams, /*orderByCalled=*/true); + return new Query(this.repo, this.path, newParams, /*orderByCalled=*/ true); } /** @@ -385,7 +485,10 @@ export class Query { * @param {?string=} name * @return 
{!Query} */ - startAt(value: number | string | boolean | null = null, name?: string | null): Query { + startAt( + value: number | string | boolean | null = null, + name?: string | null + ): Query { validateArgCount('Query.startAt', 0, 2, arguments.length); validateFirebaseDataArg('Query.startAt', 1, value, this.path, true); validateKey('Query.startAt', 2, name, true); @@ -394,8 +497,10 @@ export class Query { Query.validateLimit_(newParams); Query.validateQueryEndpoints_(newParams); if (this.queryParams_.hasStart()) { - throw new Error('Query.startAt: Starting point was already set (by another call to startAt ' + - 'or equalTo).'); + throw new Error( + 'Query.startAt: Starting point was already set (by another call to startAt ' + + 'or equalTo).' + ); } // Calling with no params tells us to start at the beginning. @@ -411,7 +516,10 @@ export class Query { * @param {?string=} name * @return {!Query} */ - endAt(value: number | string | boolean | null = null, name?: string | null): Query { + endAt( + value: number | string | boolean | null = null, + name?: string | null + ): Query { validateArgCount('Query.endAt', 0, 2, arguments.length); validateFirebaseDataArg('Query.endAt', 1, value, this.path, true); validateKey('Query.endAt', 2, name, true); @@ -420,8 +528,10 @@ export class Query { Query.validateLimit_(newParams); Query.validateQueryEndpoints_(newParams); if (this.queryParams_.hasEnd()) { - throw new Error('Query.endAt: Ending point was already set (by another call to endAt or ' + - 'equalTo).'); + throw new Error( + 'Query.endAt: Ending point was already set (by another call to endAt or ' + + 'equalTo).' 
+ ); } return new Query(this.repo, this.path, newParams, this.orderByCalled_); @@ -439,12 +549,16 @@ export class Query { validateFirebaseDataArg('Query.equalTo', 1, value, this.path, false); validateKey('Query.equalTo', 2, name, true); if (this.queryParams_.hasStart()) { - throw new Error('Query.equalTo: Starting point was already set (by another call to startAt or ' + - 'equalTo).'); + throw new Error( + 'Query.equalTo: Starting point was already set (by another call to startAt or ' + + 'equalTo).' + ); } if (this.queryParams_.hasEnd()) { - throw new Error('Query.equalTo: Ending point was already set (by another call to endAt or ' + - 'equalTo).'); + throw new Error( + 'Query.equalTo: Ending point was already set (by another call to endAt or ' + + 'equalTo).' + ); } return this.startAt(value, name).endAt(value, name); } @@ -480,7 +594,7 @@ export class Query { queryIdentifier(): string { const obj = this.queryObject(); const id = ObjectToUniqueKey(obj); - return (id === '{}') ? 'default' : id; + return id === '{}' ? 
'default' : id; } /** @@ -491,15 +605,17 @@ export class Query { isEqual(other: Query): boolean { validateArgCount('Query.isEqual', 1, 1, arguments.length); if (!(other instanceof Query)) { - const error = 'Query.isEqual failed: First argument must be an instance of firebase.database.Query.'; + const error = + 'Query.isEqual failed: First argument must be an instance of firebase.database.Query.'; throw new Error(error); } - const sameRepo = (this.repo === other.repo); + const sameRepo = this.repo === other.repo; const samePath = this.path.equals(other.path); - const sameQueryIdentifier = (this.queryIdentifier() === other.queryIdentifier()); + const sameQueryIdentifier = + this.queryIdentifier() === other.queryIdentifier(); - return (sameRepo && samePath && sameQueryIdentifier); + return sameRepo && samePath && sameQueryIdentifier; } /** @@ -510,23 +626,33 @@ export class Query { * @return {{cancel: ?function(Error), context: ?Object}} * @private */ - private static getCancelAndContextArgs_(fnName: string, cancelOrContext?: ((a: Error) => void) | Object, - context?: Object): { cancel: ((a: Error) => void) | null, context: Object | null } { - const ret: { cancel: ((a: Error) => void) | null, context: Object | null } = {cancel: null, context: null}; + private static getCancelAndContextArgs_( + fnName: string, + cancelOrContext?: ((a: Error) => void) | Object, + context?: Object + ): { cancel: ((a: Error) => void) | null; context: Object | null } { + const ret: { + cancel: ((a: Error) => void) | null; + context: Object | null; + } = { cancel: null, context: null }; if (cancelOrContext && context) { - ret.cancel = (cancelOrContext as (a: Error) => void); + ret.cancel = cancelOrContext as (a: Error) => void; validateCallback(fnName, 3, ret.cancel, true); ret.context = context; validateContextObject(fnName, 4, ret.context, true); - } else if (cancelOrContext) { // we have either a cancel callback or a context. 
- if (typeof cancelOrContext === 'object' && cancelOrContext !== null) { // it's a context! + } else if (cancelOrContext) { + // we have either a cancel callback or a context. + if (typeof cancelOrContext === 'object' && cancelOrContext !== null) { + // it's a context! ret.context = cancelOrContext; } else if (typeof cancelOrContext === 'function') { ret.cancel = cancelOrContext; } else { - throw new Error(errorPrefix(fnName, 3, true) + - ' must either be a cancel callback or a context object.'); + throw new Error( + errorPrefix(fnName, 3, true) + + ' must either be a cancel callback or a context object.' + ); } } return ret; diff --git a/src/database/api/Reference.ts b/src/database/api/Reference.ts index f86c4c8ac34..1cf757cb437 100644 --- a/src/database/api/Reference.ts +++ b/src/database/api/Reference.ts @@ -29,19 +29,20 @@ import { validateBoolean, validatePriority, validateFirebaseDataArg, - validateWritablePath, + validateWritablePath } from '../core/util/validation'; +import { validateArgCount, validateCallback } from '../../utils/validation'; import { - validateArgCount, - validateCallback, -} from '../../utils/validation'; -import { Deferred, attachDummyErrorHandler, PromiseImpl } from '../../utils/promise'; + Deferred, + attachDummyErrorHandler, + PromiseImpl +} from '../../utils/promise'; import { SyncPoint } from '../core/SyncPoint'; import { Database } from './Database'; import { DataSnapshot } from './DataSnapshot'; export interface ReferenceConstructor { - new(repo: Repo, path: Path): Reference; + new (repo: Repo, path: Path): Reference; } export class Reference extends Query { @@ -61,7 +62,9 @@ export class Reference extends Query { */ constructor(repo: Repo, path: Path) { if (!(repo instanceof Repo)) { - throw new Error('new Reference() no longer supported - use app.database().'); + throw new Error( + 'new Reference() no longer supported - use app.database().' + ); } // call Query's constructor, passing in the repo and path. 
@@ -72,10 +75,8 @@ export class Reference extends Query { getKey(): string | null { validateArgCount('Reference.key', 0, 0, arguments.length); - if (this.path.isEmpty()) - return null; - else - return this.path.getBack(); + if (this.path.isEmpty()) return null; + else return this.path.getBack(); } /** @@ -89,8 +90,7 @@ export class Reference extends Query { } else if (!(pathString instanceof Path)) { if (this.path.getFront() === null) validateRootPathString('Reference.child', 1, pathString, false); - else - validatePathString('Reference.child', 1, pathString, false); + else validatePathString('Reference.child', 1, pathString, false); } return new Reference(this.repo, this.path.child(pathString)); @@ -108,7 +108,7 @@ export class Reference extends Query { getRoot(): Reference { validateArgCount('Reference.root', 0, 0, arguments.length); - let ref = (this as any); + let ref = this as any; while (ref.getParent() !== null) { ref = ref.getParent(); } @@ -132,7 +132,12 @@ export class Reference extends Query { validateCallback('Reference.set', 2, onComplete, true); const deferred = new Deferred(); - this.repo.setWithPriority(this.path, newVal, /*priority=*/ null, deferred.wrapCallback(onComplete)); + this.repo.setWithPriority( + this.path, + newVal, + /*priority=*/ null, + deferred.wrapCallback(onComplete) + ); return deferred.promise; } @@ -141,7 +146,10 @@ export class Reference extends Query { * @param {function(?Error)=} onComplete * @return {!Promise} */ - update(objectToMerge: Object, onComplete?: (a: Error | null) => void): Promise { + update( + objectToMerge: Object, + onComplete?: (a: Error | null) => void + ): Promise { validateArgCount('Reference.update', 1, 2, arguments.length); validateWritablePath('Reference.update', this.path); @@ -151,16 +159,27 @@ export class Reference extends Query { newObjectToMerge['' + i] = objectToMerge[i]; } objectToMerge = newObjectToMerge; - warn('Passing an Array to Firebase.update() is deprecated. 
' + - 'Use set() if you want to overwrite the existing data, or ' + - 'an Object with integer keys if you really do want to ' + - 'only update some of the children.' + warn( + 'Passing an Array to Firebase.update() is deprecated. ' + + 'Use set() if you want to overwrite the existing data, or ' + + 'an Object with integer keys if you really do want to ' + + 'only update some of the children.' ); } - validateFirebaseMergeDataArg('Reference.update', 1, objectToMerge, this.path, false); + validateFirebaseMergeDataArg( + 'Reference.update', + 1, + objectToMerge, + this.path, + false + ); validateCallback('Reference.update', 2, onComplete, true); const deferred = new Deferred(); - this.repo.update(this.path, objectToMerge, deferred.wrapCallback(onComplete)); + this.repo.update( + this.path, + objectToMerge, + deferred.wrapCallback(onComplete) + ); return deferred.promise; } @@ -170,19 +189,35 @@ export class Reference extends Query { * @param {function(?Error)=} onComplete * @return {!Promise} */ - setWithPriority(newVal: any, newPriority: string | number | null, - onComplete?: (a: Error | null) => void): Promise { + setWithPriority( + newVal: any, + newPriority: string | number | null, + onComplete?: (a: Error | null) => void + ): Promise { validateArgCount('Reference.setWithPriority', 2, 3, arguments.length); validateWritablePath('Reference.setWithPriority', this.path); - validateFirebaseDataArg('Reference.setWithPriority', 1, newVal, this.path, false); + validateFirebaseDataArg( + 'Reference.setWithPriority', + 1, + newVal, + this.path, + false + ); validatePriority('Reference.setWithPriority', 2, newPriority, false); validateCallback('Reference.setWithPriority', 3, onComplete, true); if (this.getKey() === '.length' || this.getKey() === '.keys') - throw 'Reference.setWithPriority failed: ' + this.getKey() + ' is a read-only object.'; + throw 'Reference.setWithPriority failed: ' + + this.getKey() + + ' is a read-only object.'; const deferred = new Deferred(); - 
this.repo.setWithPriority(this.path, newVal, newPriority, deferred.wrapCallback(onComplete)); + this.repo.setWithPriority( + this.path, + newVal, + newPriority, + deferred.wrapCallback(onComplete) + ); return deferred.promise; } @@ -204,9 +239,11 @@ export class Reference extends Query { * @param {boolean=} applyLocally * @return {!Promise} */ - transaction(transactionUpdate: (a: any) => any, - onComplete?: (a: Error | null, b: boolean, c: DataSnapshot | null) => void, - applyLocally?: boolean): Promise { + transaction( + transactionUpdate: (a: any) => any, + onComplete?: (a: Error | null, b: boolean, c: DataSnapshot | null) => void, + applyLocally?: boolean + ): Promise { validateArgCount('Reference.transaction', 1, 3, arguments.length); validateWritablePath('Reference.transaction', this.path); validateCallback('Reference.transaction', 1, transactionUpdate, false); @@ -216,17 +253,22 @@ export class Reference extends Query { validateBoolean('Reference.transaction', 3, applyLocally, true); if (this.getKey() === '.length' || this.getKey() === '.keys') - throw 'Reference.transaction failed: ' + this.getKey() + ' is a read-only object.'; + throw 'Reference.transaction failed: ' + + this.getKey() + + ' is a read-only object.'; - if (applyLocally === undefined) - applyLocally = true; + if (applyLocally === undefined) applyLocally = true; const deferred = new Deferred(); if (typeof onComplete === 'function') { attachDummyErrorHandler(deferred.promise); } - const promiseComplete = function (error: Error, committed: boolean, snapshot: DataSnapshot) { + const promiseComplete = function( + error: Error, + committed: boolean, + snapshot: DataSnapshot + ) { if (error) { deferred.reject(error); } else { @@ -236,7 +278,12 @@ export class Reference extends Query { onComplete(error, committed, snapshot); } }; - this.repo.startTransaction(this.path, transactionUpdate, promiseComplete, applyLocally); + this.repo.startTransaction( + this.path, + transactionUpdate, + promiseComplete, 
+ applyLocally + ); return deferred.promise; } @@ -246,14 +293,22 @@ export class Reference extends Query { * @param {function(?Error)=} onComplete * @return {!Promise} */ - setPriority(priority: string | number | null, onComplete?: (a: Error | null) => void): Promise { + setPriority( + priority: string | number | null, + onComplete?: (a: Error | null) => void + ): Promise { validateArgCount('Reference.setPriority', 1, 2, arguments.length); validateWritablePath('Reference.setPriority', this.path); validatePriority('Reference.setPriority', 1, priority, false); validateCallback('Reference.setPriority', 2, onComplete, true); const deferred = new Deferred(); - this.repo.setWithPriority(this.path.child('.priority'), priority, null, deferred.wrapCallback(onComplete)); + this.repo.setWithPriority( + this.path.child('.priority'), + priority, + null, + deferred.wrapCallback(onComplete) + ); return deferred.promise; } @@ -328,4 +383,4 @@ export class Reference extends Query { * dependency issues */ Query.__referenceConstructor = Reference; -SyncPoint.__referenceConstructor = Reference; \ No newline at end of file +SyncPoint.__referenceConstructor = Reference; diff --git a/src/database/api/TransactionResult.ts b/src/database/api/TransactionResult.ts index b5d5608c555..5ad71a27102 100644 --- a/src/database/api/TransactionResult.ts +++ b/src/database/api/TransactionResult.ts @@ -25,9 +25,7 @@ export class TransactionResult { * @param {boolean} committed * @param {DataSnapshot} snapshot */ - constructor(public committed: boolean, public snapshot: DataSnapshot) { - - } + constructor(public committed: boolean, public snapshot: DataSnapshot) {} // Do not create public documentation. 
This is intended to make JSON serialization work but is otherwise unnecessary // for end-users @@ -35,5 +33,4 @@ export class TransactionResult { validateArgCount('TransactionResult.toJSON', 0, 1, arguments.length); return { committed: this.committed, snapshot: this.snapshot.toJSON() }; } - -} \ No newline at end of file +} diff --git a/src/database/api/internal.ts b/src/database/api/internal.ts index 90041e70d0c..9909b9074b8 100644 --- a/src/database/api/internal.ts +++ b/src/database/api/internal.ts @@ -14,8 +14,8 @@ * limitations under the License. */ -import { WebSocketConnection } from "../realtime/WebSocketConnection"; -import { BrowserPollConnection } from "../realtime/BrowserPollConnection"; +import { WebSocketConnection } from '../realtime/WebSocketConnection'; +import { BrowserPollConnection } from '../realtime/BrowserPollConnection'; import { Reference } from './Reference'; /** @@ -40,7 +40,10 @@ export const isWebSocketsAvailable = function(): boolean { return WebSocketConnection['isAvailable'](); }; -export const setSecurityDebugCallback = function(ref: Reference, callback: (a: Object) => void) { +export const setSecurityDebugCallback = function( + ref: Reference, + callback: (a: Object) => void +) { (ref.repo.persistentConnection_ as any).securityDebugCallback_ = callback; }; @@ -56,6 +59,9 @@ export const dataUpdateCount = function(ref: Reference): number { return ref.repo.dataUpdateCount; }; -export const interceptServerData = function(ref: Reference, callback: ((a: string, b: any) => void) | null) { +export const interceptServerData = function( + ref: Reference, + callback: ((a: string, b: any) => void) | null +) { return ref.repo.interceptServerData_(callback); }; diff --git a/src/database/api/onDisconnect.ts b/src/database/api/onDisconnect.ts index 4456513b989..a6568993b56 100644 --- a/src/database/api/onDisconnect.ts +++ b/src/database/api/onDisconnect.ts @@ -14,15 +14,12 @@ * limitations under the License. 
*/ -import { - validateArgCount, - validateCallback -} from '../../utils/validation'; +import { validateArgCount, validateCallback } from '../../utils/validation'; import { validateWritablePath, validateFirebaseDataArg, validatePriority, - validateFirebaseMergeDataArg, + validateFirebaseMergeDataArg } from '../core/util/validation'; import { warn } from '../core/util/util'; import { Deferred } from '../../utils/promise'; @@ -37,9 +34,7 @@ export class OnDisconnect { * @param {!Repo} repo_ * @param {!Path} path_ */ - constructor(private repo_: Repo, - private path_: Path) { - } + constructor(private repo_: Repo, private path_: Path) {} /** * @param {function(?Error)=} onComplete @@ -49,7 +44,10 @@ export class OnDisconnect { validateArgCount('OnDisconnect.cancel', 0, 1, arguments.length); validateCallback('OnDisconnect.cancel', 1, onComplete, true); const deferred = new Deferred(); - this.repo_.onDisconnectCancel(this.path_, deferred.wrapCallback(onComplete)); + this.repo_.onDisconnectCancel( + this.path_, + deferred.wrapCallback(onComplete) + ); return deferred.promise; } @@ -62,7 +60,11 @@ export class OnDisconnect { validateWritablePath('OnDisconnect.remove', this.path_); validateCallback('OnDisconnect.remove', 1, onComplete, true); const deferred = new Deferred(); - this.repo_.onDisconnectSet(this.path_, null, deferred.wrapCallback(onComplete)); + this.repo_.onDisconnectSet( + this.path_, + null, + deferred.wrapCallback(onComplete) + ); return deferred.promise; } @@ -77,7 +79,11 @@ export class OnDisconnect { validateFirebaseDataArg('OnDisconnect.set', 1, value, this.path_, false); validateCallback('OnDisconnect.set', 2, onComplete, true); const deferred = new Deferred(); - this.repo_.onDisconnectSet(this.path_, value, deferred.wrapCallback(onComplete)); + this.repo_.onDisconnectSet( + this.path_, + value, + deferred.wrapCallback(onComplete) + ); return deferred.promise; } @@ -87,16 +93,30 @@ export class OnDisconnect { * @param {function(?Error)=} onComplete * 
@return {!firebase.Promise} */ - setWithPriority(value: any, priority: number | string | null, onComplete?: (a: Error | null) => void): Promise { + setWithPriority( + value: any, + priority: number | string | null, + onComplete?: (a: Error | null) => void + ): Promise { validateArgCount('OnDisconnect.setWithPriority', 2, 3, arguments.length); validateWritablePath('OnDisconnect.setWithPriority', this.path_); - validateFirebaseDataArg('OnDisconnect.setWithPriority', - 1, value, this.path_, false); + validateFirebaseDataArg( + 'OnDisconnect.setWithPriority', + 1, + value, + this.path_, + false + ); validatePriority('OnDisconnect.setWithPriority', 2, priority, false); validateCallback('OnDisconnect.setWithPriority', 3, onComplete, true); const deferred = new Deferred(); - this.repo_.onDisconnectSetWithPriority(this.path_, value, priority, deferred.wrapCallback(onComplete)); + this.repo_.onDisconnectSetWithPriority( + this.path_, + value, + priority, + deferred.wrapCallback(onComplete) + ); return deferred.promise; } @@ -105,7 +125,10 @@ export class OnDisconnect { * @param {function(?Error)=} onComplete * @return {!firebase.Promise} */ - update(objectToMerge: object, onComplete?: (a: Error | null) => void): Promise { + update( + objectToMerge: object, + onComplete?: (a: Error | null) => void + ): Promise { validateArgCount('OnDisconnect.update', 1, 2, arguments.length); validateWritablePath('OnDisconnect.update', this.path_); if (Array.isArray(objectToMerge)) { @@ -116,14 +139,23 @@ export class OnDisconnect { objectToMerge = newObjectToMerge; warn( 'Passing an Array to firebase.database.onDisconnect().update() is deprecated. Use set() if you want to overwrite the ' + - 'existing data, or an Object with integer keys if you really do want to only update some of the children.' + 'existing data, or an Object with integer keys if you really do want to only update some of the children.' 
); } - validateFirebaseMergeDataArg('OnDisconnect.update', 1, objectToMerge, - this.path_, false); + validateFirebaseMergeDataArg( + 'OnDisconnect.update', + 1, + objectToMerge, + this.path_, + false + ); validateCallback('OnDisconnect.update', 2, onComplete, true); const deferred = new Deferred(); - this.repo_.onDisconnectUpdate(this.path_, objectToMerge, deferred.wrapCallback(onComplete)); + this.repo_.onDisconnectUpdate( + this.path_, + objectToMerge, + deferred.wrapCallback(onComplete) + ); return deferred.promise; } -} \ No newline at end of file +} diff --git a/src/database/api/test_access.ts b/src/database/api/test_access.ts index 1bc16cf43bc..a85d5a62cc8 100644 --- a/src/database/api/test_access.ts +++ b/src/database/api/test_access.ts @@ -14,10 +14,10 @@ * limitations under the License. */ -import { RepoInfo } from "../core/RepoInfo"; -import { PersistentConnection } from "../core/PersistentConnection"; -import { RepoManager } from "../core/RepoManager"; -import { Connection } from "../realtime/Connection"; +import { RepoInfo } from '../core/RepoInfo'; +import { PersistentConnection } from '../core/PersistentConnection'; +import { RepoManager } from '../core/RepoManager'; +import { Connection } from '../realtime/Connection'; import { Query } from './Query'; export const DataConnection = PersistentConnection; @@ -26,16 +26,22 @@ export const DataConnection = PersistentConnection; * @param {!string} pathString * @param {function(*)} onComplete */ -(PersistentConnection.prototype as any).simpleListen = function(pathString: string, onComplete: (a: any) => void) { - this.sendRequest('q', {'p': pathString}, onComplete); +(PersistentConnection.prototype as any).simpleListen = function( + pathString: string, + onComplete: (a: any) => void +) { + this.sendRequest('q', { p: pathString }, onComplete); }; /** * @param {*} data * @param {function(*)} onEcho */ -(PersistentConnection.prototype as any).echo = function(data: any, onEcho: (a: any) => void) { - 
this.sendRequest('echo', {'d': data}, onEcho); +(PersistentConnection.prototype as any).echo = function( + data: any, + onEcho: (a: any) => void +) { + this.sendRequest('echo', { d: data }, onEcho); }; // RealTimeConnection properties that we use in tests. @@ -47,7 +53,12 @@ export const RealTimeConnection = Connection; */ export const hijackHash = function(newHash: () => string) { const oldPut = PersistentConnection.prototype.put; - PersistentConnection.prototype.put = function(pathString, data, opt_onComplete, opt_hash) { + PersistentConnection.prototype.put = function( + pathString, + data, + opt_onComplete, + opt_hash + ) { if (opt_hash !== undefined) { opt_hash = newHash(); } @@ -55,7 +66,7 @@ export const hijackHash = function(newHash: () => string) { }; return function() { PersistentConnection.prototype.put = oldPut; - } + }; }; /** diff --git a/src/database/core/AuthTokenProvider.ts b/src/database/core/AuthTokenProvider.ts index 0cbfd6fb397..f1acc9445bd 100644 --- a/src/database/core/AuthTokenProvider.ts +++ b/src/database/core/AuthTokenProvider.ts @@ -24,28 +24,27 @@ export class AuthTokenProvider { /** * @param {!FirebaseApp} app_ */ - constructor(private app_: FirebaseApp) { - } + constructor(private app_: FirebaseApp) {} /** * @param {boolean} forceRefresh * @return {!Promise} */ getToken(forceRefresh: boolean): Promise { - return this.app_['INTERNAL']['getToken'](forceRefresh) - .then( - null, - // .catch - function (error) { - // TODO: Need to figure out all the cases this is raised and whether - // this makes sense. - if (error && error.code === 'auth/token-not-initialized') { - log('Got auth/token-not-initialized error. Treating as null token.'); - return null; - } else { - return Promise.reject(error); - } - }); + return this.app_['INTERNAL']['getToken'](forceRefresh).then( + null, + // .catch + function(error) { + // TODO: Need to figure out all the cases this is raised and whether + // this makes sense. 
+ if (error && error.code === 'auth/token-not-initialized') { + log('Got auth/token-not-initialized error. Treating as null token.'); + return null; + } else { + return Promise.reject(error); + } + } + ); } addTokenChangeListener(listener: (token: string | null) => void) { @@ -59,19 +58,24 @@ export class AuthTokenProvider { } notifyForInvalidToken() { - let errorMessage = 'Provided authentication credentials for the app named "' + - this.app_.name + '" are invalid. This usually indicates your app was not ' + + let errorMessage = + 'Provided authentication credentials for the app named "' + + this.app_.name + + '" are invalid. This usually indicates your app was not ' + 'initialized correctly. '; if ('credential' in this.app_.options) { - errorMessage += 'Make sure the "credential" property provided to initializeApp() ' + + errorMessage += + 'Make sure the "credential" property provided to initializeApp() ' + 'is authorized to access the specified "databaseURL" and is from the correct ' + 'project.'; } else if ('serviceAccount' in this.app_.options) { - errorMessage += 'Make sure the "serviceAccount" property provided to initializeApp() ' + + errorMessage += + 'Make sure the "serviceAccount" property provided to initializeApp() ' + 'is authorized to access the specified "databaseURL" and is from the correct ' + 'project.'; } else { - errorMessage += 'Make sure the "apiKey" and "databaseURL" properties provided to ' + + errorMessage += + 'Make sure the "apiKey" and "databaseURL" properties provided to ' + 'initializeApp() match the values provided for your app at ' + 'https://console.firebase.google.com/.'; } diff --git a/src/database/core/CompoundWrite.ts b/src/database/core/CompoundWrite.ts index adad6a10574..3cb097cebf7 100644 --- a/src/database/core/CompoundWrite.ts +++ b/src/database/core/CompoundWrite.ts @@ -14,12 +14,12 @@ * limitations under the License. 
*/ -import { ImmutableTree } from "./util/ImmutableTree"; -import { Path } from "./util/Path"; -import { forEach } from "../../utils/obj"; -import { Node, NamedNode } from "./snap/Node"; -import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; -import { assert } from "../../utils/assert"; +import { ImmutableTree } from './util/ImmutableTree'; +import { Path } from './util/Path'; +import { forEach } from '../../utils/obj'; +import { Node, NamedNode } from './snap/Node'; +import { PRIORITY_INDEX } from './snap/indexes/PriorityIndex'; +import { assert } from '../../utils/assert'; import { ChildrenNode } from './snap/ChildrenNode'; /** @@ -32,7 +32,7 @@ import { ChildrenNode } from './snap/ChildrenNode'; * @param {!ImmutableTree.} writeTree */ export class CompoundWrite { - constructor(private writeTree_: ImmutableTree) {}; + constructor(private writeTree_: ImmutableTree) {} /** * @type {!CompoundWrite} */ @@ -112,7 +112,9 @@ export class CompoundWrite { getCompleteNode(path: Path): Node | null { const rootmost = this.writeTree_.findRootMostValueAndPath(path); if (rootmost != null) { - return this.writeTree_.get(rootmost.path).getChild(Path.relativePath(rootmost.path, path)); + return this.writeTree_ + .get(rootmost.path) + .getChild(Path.relativePath(rootmost.path, path)); } else { return null; } @@ -129,7 +131,10 @@ export class CompoundWrite { if (node != null) { // If it's a leaf node, it has no children; so nothing to do. 
if (!node.isLeafNode()) { - (node as ChildrenNode).forEachChild(PRIORITY_INDEX, function(childName, childNode) { + (node as ChildrenNode).forEachChild(PRIORITY_INDEX, function( + childName, + childNode + ) { children.push(new NamedNode(childName, childNode)); }); } @@ -185,7 +190,11 @@ export class CompoundWrite { * @return {!Node} * @private */ - private static applySubtreeWrite_ = function(relativePath: Path, writeTree: ImmutableTree, node: Node): Node { + private static applySubtreeWrite_ = function( + relativePath: Path, + writeTree: ImmutableTree, + node: Node + ): Node { if (writeTree.value != null) { // Since there a write is always a leaf, we're done here return node.updateChild(relativePath, writeTree.value); @@ -195,10 +204,17 @@ export class CompoundWrite { if (childKey === '.priority') { // Apply priorities at the end so we don't update priorities for either empty nodes or forget // to apply priorities to empty nodes that are later filled - assert(childTree.value !== null, 'Priority writes must always be leaf nodes'); + assert( + childTree.value !== null, + 'Priority writes must always be leaf nodes' + ); priorityWrite = childTree.value; } else { - node = CompoundWrite.applySubtreeWrite_(relativePath.child(childKey), childTree, node); + node = CompoundWrite.applySubtreeWrite_( + relativePath.child(childKey), + childTree, + node + ); } }); // If there was a priority write, we only apply it if the node is not empty @@ -207,6 +223,5 @@ export class CompoundWrite { } return node; } - } + }; } - diff --git a/src/database/core/PersistentConnection.ts b/src/database/core/PersistentConnection.ts index 3a8e0cfd926..5590f3da82d 100644 --- a/src/database/core/PersistentConnection.ts +++ b/src/database/core/PersistentConnection.ts @@ -36,7 +36,7 @@ import { RepoInfo } from './RepoInfo'; import { Query } from '../api/Query'; const RECONNECT_MIN_DELAY = 1000; -const RECONNECT_MAX_DELAY_DEFAULT = 60 * 5 * 1000; // 5 minutes in milliseconds (Case: 1858) +const 
RECONNECT_MAX_DELAY_DEFAULT = 60 * 5 * 1000; // 5 minutes in milliseconds (Case: 1858) const RECONNECT_MAX_DELAY_FOR_ADMINS = 30 * 1000; // 30 seconds for admin clients (likely to be a backend server) const RECONNECT_DELAY_MULTIPLIER = 1.3; const RECONNECT_DELAY_RESET_TIMEOUT = 30000; // Reset delay back to MIN_DELAY after being connected for 30sec. @@ -105,7 +105,10 @@ export class PersistentConnection extends ServerActions { * sendRequest(Object), * close() * }} */ - private realtime_: { sendRequest(a: Object): void, close(): void } | null = null; + private realtime_: { + sendRequest(a: Object): void; + close(): void; + } | null = null; /** @private {string|null} */ private authToken_: string | null = null; @@ -116,7 +119,6 @@ export class PersistentConnection extends ServerActions { private lastConnectionAttemptTime_: number | null = null; private lastConnectionEstablishedTime_: number | null = null; - /** * @private */ @@ -138,16 +140,25 @@ export class PersistentConnection extends ServerActions { * @param authTokenProvider_ * @param authOverride_ */ - constructor(private repoInfo_: RepoInfo, - private onDataUpdate_: (a: string, b: any, c: boolean, d: number | null) => void, - private onConnectStatus_: (a: boolean) => void, - private onServerInfoUpdate_: (a: any) => void, - private authTokenProvider_: AuthTokenProvider, - private authOverride_?: Object | null) { + constructor( + private repoInfo_: RepoInfo, + private onDataUpdate_: ( + a: string, + b: any, + c: boolean, + d: number | null + ) => void, + private onConnectStatus_: (a: boolean) => void, + private onServerInfoUpdate_: (a: any) => void, + private authTokenProvider_: AuthTokenProvider, + private authOverride_?: Object | null + ) { super(); if (authOverride_ && !isNodeSdk()) { - throw new Error('Auth override specified in options, but not supported on non Node.js platforms'); + throw new Error( + 'Auth override specified in options, but not supported on non Node.js platforms' + ); } 
this.scheduleConnect_(0); @@ -164,12 +175,19 @@ export class PersistentConnection extends ServerActions { * @param {function(*)=} onResponse * @protected */ - protected sendRequest(action: string, body: any, onResponse?: (a: any) => void) { + protected sendRequest( + action: string, + body: any, + onResponse?: (a: any) => void + ) { const curReqNum = ++this.requestNumber_; - const msg = {'r': curReqNum, 'a': action, 'b': body}; + const msg = { r: curReqNum, a: action, b: body }; this.log_(stringify(msg)); - assert(this.connected_, 'sendRequest call when we\'re not connected not allowed.'); + assert( + this.connected_, + "sendRequest call when we're not connected not allowed." + ); this.realtime_.sendRequest(msg); if (onResponse) { this.requestCBHash_[curReqNum] = onResponse; @@ -179,14 +197,25 @@ export class PersistentConnection extends ServerActions { /** * @inheritDoc */ - listen(query: Query, currentHashFn: () => string, tag: number | null, onComplete: (a: string, b: any) => void) { + listen( + query: Query, + currentHashFn: () => string, + tag: number | null, + onComplete: (a: string, b: any) => void + ) { const queryId = query.queryIdentifier(); const pathString = query.path.toString(); this.log_('Listen called for ' + pathString + ' ' + queryId); this.listens_[pathString] = this.listens_[pathString] || {}; - assert(query.getQueryParams().isDefault() || !query.getQueryParams().loadsAllData(), - 'listen() called for non-default but complete query'); - assert(!this.listens_[pathString][queryId], 'listen() called twice for same path/queryId.'); + assert( + query.getQueryParams().isDefault() || + !query.getQueryParams().loadsAllData(), + 'listen() called for non-default but complete query' + ); + assert( + !this.listens_[pathString][queryId], + 'listen() called twice for same path/queryId.' 
+ ); const listenSpec: ListenSpec = { onComplete: onComplete, hashFn: currentHashFn, @@ -212,7 +241,7 @@ export class PersistentConnection extends ServerActions { const pathString = query.path.toString(); const queryId = query.queryIdentifier(); this.log_('Listen on ' + pathString + ' for ' + queryId); - const req: { [k: string]: any } = {/*path*/ 'p': pathString}; + const req: { [k: string]: any } = { /*path*/ p: pathString }; const action = 'q'; @@ -222,7 +251,7 @@ export class PersistentConnection extends ServerActions { req['t'] = listenSpec.tag; } - req[/*hash*/'h'] = listenSpec.hashFn(); + req[/*hash*/ 'h'] = listenSpec.hashFn(); this.sendRequest(action, req, (message: { [k: string]: any }) => { const payload: any = message[/*data*/ 'd']; @@ -231,7 +260,8 @@ export class PersistentConnection extends ServerActions { // print warnings in any case... PersistentConnection.warnOnListenWarnings_(payload, query); - const currentListenSpec = this.listens_[pathString] && this.listens_[pathString][queryId]; + const currentListenSpec = + this.listens_[pathString] && this.listens_[pathString][queryId]; // only trigger actions if the listen hasn't been removed and readded if (currentListenSpec === listenSpec) { this.log_('listen response', message); @@ -256,10 +286,16 @@ export class PersistentConnection extends ServerActions { if (payload && typeof payload === 'object' && contains(payload, 'w')) { const warnings = safeGet(payload, 'w'); if (Array.isArray(warnings) && ~warnings.indexOf('no_index')) { - const indexSpec = '".indexOn": "' + query.getQueryParams().getIndex().toString() + '"'; + const indexSpec = + '".indexOn": "' + query.getQueryParams().getIndex().toString() + '"'; const indexPath = query.path.toString(); - warn('Using an unspecified index. Consider adding ' + indexSpec + ' at ' + indexPath + - ' to your security rules for better performance'); + warn( + 'Using an unspecified index. 
Consider adding ' + + indexSpec + + ' at ' + + indexPath + + ' to your security rules for better performance' + ); } } } @@ -276,7 +312,7 @@ export class PersistentConnection extends ServerActions { //If we're connected we want to let the server know to unauthenticate us. If we're not connected, simply delete //the credential so we dont become authenticated next time we connect. if (this.connected_) { - this.sendRequest('unauth', {}, () => { }); + this.sendRequest('unauth', {}, () => {}); } } @@ -292,7 +328,9 @@ export class PersistentConnection extends ServerActions { // Additionally, we don't bother resetting the max delay back to the default if auth fails / expires. const isFirebaseSecret = credential && credential.length === 40; if (isFirebaseSecret || isAdmin(credential)) { - this.log_('Admin auth credential detected. Reducing max reconnect time.'); + this.log_( + 'Admin auth credential detected. Reducing max reconnect time.' + ); this.maxReconnectDelay_ = RECONNECT_MAX_DELAY_FOR_ADMINS; } } @@ -305,7 +343,7 @@ export class PersistentConnection extends ServerActions { if (this.connected_ && this.authToken_) { const token = this.authToken_; const authMethod = isValidFormat(token) ? 
'auth' : 'gauth'; - const requestData: { [k: string]: any } = {'cred': token}; + const requestData: { [k: string]: any } = { cred: token }; if (this.authOverride_ === null) { requestData['noauth'] = true; } else if (typeof this.authOverride_ === 'object') { @@ -336,18 +374,26 @@ export class PersistentConnection extends ServerActions { this.log_('Unlisten called for ' + pathString + ' ' + queryId); - assert(query.getQueryParams().isDefault() || !query.getQueryParams().loadsAllData(), - 'unlisten() called for non-default but complete query'); + assert( + query.getQueryParams().isDefault() || + !query.getQueryParams().loadsAllData(), + 'unlisten() called for non-default but complete query' + ); const listen = this.removeListen_(pathString, queryId); if (listen && this.connected_) { this.sendUnlisten_(pathString, queryId, query.queryObject(), tag); } } - private sendUnlisten_(pathString: string, queryId: string, queryObj: Object, tag: number | null) { + private sendUnlisten_( + pathString: string, + queryId: string, + queryObj: Object, + tag: number | null + ) { this.log_('Unlisten on ' + pathString + ' for ' + queryId); - const req: { [k: string]: any } = {/*path*/ 'p': pathString}; + const req: { [k: string]: any } = { /*path*/ p: pathString }; const action = 'n'; // Only bother sending queryId if it's non-default. 
if (tag) { @@ -361,7 +407,11 @@ export class PersistentConnection extends ServerActions { /** * @inheritDoc */ - onDisconnectPut(pathString: string, data: any, onComplete?: (a: string, b: string) => void) { + onDisconnectPut( + pathString: string, + data: any, + onComplete?: (a: string, b: string) => void + ) { if (this.connected_) { this.sendOnDisconnect_('o', pathString, data, onComplete); } else { @@ -377,7 +427,11 @@ export class PersistentConnection extends ServerActions { /** * @inheritDoc */ - onDisconnectMerge(pathString: string, data: any, onComplete?: (a: string, b: string) => void) { + onDisconnectMerge( + pathString: string, + data: any, + onComplete?: (a: string, b: string) => void + ) { if (this.connected_) { this.sendOnDisconnect_('om', pathString, data, onComplete); } else { @@ -393,7 +447,10 @@ export class PersistentConnection extends ServerActions { /** * @inheritDoc */ - onDisconnectCancel(pathString: string, onComplete?: (a: string, b: string) => void) { + onDisconnectCancel( + pathString: string, + onComplete?: (a: string, b: string) => void + ) { if (this.connected_) { this.sendOnDisconnect_('oc', pathString, null, onComplete); } else { @@ -406,13 +463,18 @@ export class PersistentConnection extends ServerActions { } } - private sendOnDisconnect_(action: string, pathString: string, data: any, onComplete: (a: string, b: string) => void) { - const request = {/*path*/ 'p': pathString, /*data*/ 'd': data}; + private sendOnDisconnect_( + action: string, + pathString: string, + data: any, + onComplete: (a: string, b: string) => void + ) { + const request = { /*path*/ p: pathString, /*data*/ d: data }; this.log_('onDisconnect ' + action, request); this.sendRequest(action, request, (response: { [k: string]: any }) => { if (onComplete) { - setTimeout(function () { - onComplete(response[/*status*/ 's'], response[/* data */'d']); + setTimeout(function() { + onComplete(response[/*status*/ 's'], response[/* data */ 'd']); }, Math.floor(0)); } }); @@ 
-421,23 +483,40 @@ export class PersistentConnection extends ServerActions { /** * @inheritDoc */ - put(pathString: string, data: any, onComplete?: (a: string, b: string) => void, hash?: string) { + put( + pathString: string, + data: any, + onComplete?: (a: string, b: string) => void, + hash?: string + ) { this.putInternal('p', pathString, data, onComplete, hash); } /** * @inheritDoc */ - merge(pathString: string, data: any, onComplete: (a: string, b: string | null) => void, hash?: string) { + merge( + pathString: string, + data: any, + onComplete: (a: string, b: string | null) => void, + hash?: string + ) { this.putInternal('m', pathString, data, onComplete, hash); } - putInternal(action: string, pathString: string, data: any, - onComplete: (a: string, b: string | null) => void, hash?: string) { - const request: { [k: string]: any } = {/*path*/ 'p': pathString, /*data*/ 'd': data}; + putInternal( + action: string, + pathString: string, + data: any, + onComplete: (a: string, b: string | null) => void, + hash?: string + ) { + const request: { [k: string]: any } = { + /*path*/ p: pathString, + /*data*/ d: data + }; - if (hash !== undefined) - request[/*hash*/ 'h'] = hash; + if (hash !== undefined) request[/*hash*/ 'h'] = hash; // TODO: Only keep track of the most recent put for a given path? this.outstandingPuts_.push({ @@ -484,10 +563,10 @@ export class PersistentConnection extends ServerActions { reportStats(stats: { [k: string]: any }) { // If we're not connected, we just drop the stats. 
if (this.connected_) { - const request = {/*counters*/ 'c': stats}; + const request = { /*counters*/ c: stats }; this.log_('reportStats', request); - this.sendRequest(/*stats*/ 's', request, (result) => { + this.sendRequest(/*stats*/ 's', request, result => { const status = result[/*status*/ 's']; if (status !== 'ok') { const errorReason = result[/* data */ 'd']; @@ -522,18 +601,33 @@ export class PersistentConnection extends ServerActions { private onDataPush_(action: string, body: { [k: string]: any }) { this.log_('handleServerMessage', action, body); if (action === 'd') - this.onDataUpdate_(body[/*path*/ 'p'], body[/*data*/ 'd'], /*isMerge*/false, body['t']); + this.onDataUpdate_( + body[/*path*/ 'p'], + body[/*data*/ 'd'], + /*isMerge*/ false, + body['t'] + ); else if (action === 'm') - this.onDataUpdate_(body[/*path*/ 'p'], body[/*data*/ 'd'], /*isMerge=*/true, body['t']); + this.onDataUpdate_( + body[/*path*/ 'p'], + body[/*data*/ 'd'], + /*isMerge=*/ true, + body['t'] + ); else if (action === 'c') this.onListenRevoked_(body[/*path*/ 'p'], body[/*query*/ 'q']); else if (action === 'ac') - this.onAuthRevoked_(body[/*status code*/ 's'], body[/* explanation */ 'd']); - else if (action === 'sd') - this.onSecurityDebugPacket_(body); + this.onAuthRevoked_( + body[/*status code*/ 's'], + body[/* explanation */ 'd'] + ); + else if (action === 'sd') this.onSecurityDebugPacket_(body); else - error('Unrecognized action received from server: ' + stringify(action) + - '\nAre you using the latest client?'); + error( + 'Unrecognized action received from server: ' + + stringify(action) + + '\nAre you using the latest client?' 
+ ); } private onReady_(timestamp: number, sessionId: string) { @@ -551,7 +645,10 @@ export class PersistentConnection extends ServerActions { } private scheduleConnect_(timeout: number) { - assert(!this.realtime_, 'Scheduling a connect when we\'re already connected/ing?'); + assert( + !this.realtime_, + "Scheduling a connect when we're already connected/ing?" + ); if (this.establishConnectionTimer_) { clearTimeout(this.establishConnectionTimer_); @@ -572,7 +669,11 @@ export class PersistentConnection extends ServerActions { */ private onVisible_(visible: boolean) { // NOTE: Tabbing away and back to a window will defeat our reconnect backoff, but I think that's fine. - if (visible && !this.visible_ && this.reconnectDelay_ === this.maxReconnectDelay_) { + if ( + visible && + !this.visible_ && + this.reconnectDelay_ === this.maxReconnectDelay_ + ) { this.log_('Window became visible. Reducing delay.'); this.reconnectDelay_ = RECONNECT_MIN_DELAY; @@ -611,26 +712,34 @@ export class PersistentConnection extends ServerActions { if (this.shouldReconnect_()) { if (!this.visible_) { - this.log_('Window isn\'t visible. Delaying reconnect.'); + this.log_("Window isn't visible. Delaying reconnect."); this.reconnectDelay_ = this.maxReconnectDelay_; this.lastConnectionAttemptTime_ = new Date().getTime(); } else if (this.lastConnectionEstablishedTime_) { // If we've been connected long enough, reset reconnect delay to minimum. 
- const timeSinceLastConnectSucceeded = new Date().getTime() - this.lastConnectionEstablishedTime_; + const timeSinceLastConnectSucceeded = + new Date().getTime() - this.lastConnectionEstablishedTime_; if (timeSinceLastConnectSucceeded > RECONNECT_DELAY_RESET_TIMEOUT) this.reconnectDelay_ = RECONNECT_MIN_DELAY; this.lastConnectionEstablishedTime_ = null; } - const timeSinceLastConnectAttempt = new Date().getTime() - this.lastConnectionAttemptTime_; - let reconnectDelay = Math.max(0, this.reconnectDelay_ - timeSinceLastConnectAttempt); + const timeSinceLastConnectAttempt = + new Date().getTime() - this.lastConnectionAttemptTime_; + let reconnectDelay = Math.max( + 0, + this.reconnectDelay_ - timeSinceLastConnectAttempt + ); reconnectDelay = Math.random() * reconnectDelay; this.log_('Trying to reconnect in ' + reconnectDelay + 'ms'); this.scheduleConnect_(reconnectDelay); // Adjust reconnect delay for next time. - this.reconnectDelay_ = Math.min(this.maxReconnectDelay_, this.reconnectDelay_ * RECONNECT_DELAY_MULTIPLIER); + this.reconnectDelay_ = Math.min( + this.maxReconnectDelay_, + this.reconnectDelay_ * RECONNECT_DELAY_MULTIPLIER + ); } this.onConnectStatus_(false); } @@ -648,7 +757,7 @@ export class PersistentConnection extends ServerActions { const lastSessionId = this.lastSessionId; let canceled = false; let connection: Connection | null = null; - const closeFn = function () { + const closeFn = function() { if (connection) { connection.close(); } else { @@ -656,8 +765,11 @@ export class PersistentConnection extends ServerActions { onDisconnect(); } }; - const sendRequestFn = function (msg: Object) { - assert(connection, 'sendRequest call when we\'re not connected not allowed.'); + const sendRequestFn = function(msg: Object) { + assert( + connection, + "sendRequest call when we're not connected not allowed." 
+ ); connection.sendRequest(msg); }; @@ -670,33 +782,40 @@ export class PersistentConnection extends ServerActions { this.forceTokenRefresh_ = false; // First fetch auth token, and establish connection after fetching the token was successful - this.authTokenProvider_.getToken(forceRefresh).then(function (result) { - if (!canceled) { - log('getToken() completed. Creating connection.'); - self.authToken_ = result && result.accessToken; - connection = new Connection(connId, self.repoInfo_, - onDataMessage, - onReady, - onDisconnect, /* onKill= */ function (reason) { - warn(reason + ' (' + self.repoInfo_.toString() + ')'); - self.interrupt(SERVER_KILL_INTERRUPT_REASON); - }, - lastSessionId); - } else { - log('getToken() completed but was canceled'); - } - }).then(null, function (error) { - self.log_('Failed to get token: ' + error); - if (!canceled) { - if (CONSTANTS.NODE_ADMIN) { - // This may be a critical error for the Admin Node.js SDK, so log a warning. - // But getToken() may also just have temporarily failed, so we still want to - // continue retrying. - warn(error); + this.authTokenProvider_ + .getToken(forceRefresh) + .then(function(result) { + if (!canceled) { + log('getToken() completed. Creating connection.'); + self.authToken_ = result && result.accessToken; + connection = new Connection( + connId, + self.repoInfo_, + onDataMessage, + onReady, + onDisconnect, + /* onKill= */ function(reason) { + warn(reason + ' (' + self.repoInfo_.toString() + ')'); + self.interrupt(SERVER_KILL_INTERRUPT_REASON); + }, + lastSessionId + ); + } else { + log('getToken() completed but was canceled'); } - closeFn(); - } - }); + }) + .then(null, function(error) { + self.log_('Failed to get token: ' + error); + if (!canceled) { + if (CONSTANTS.NODE_ADMIN) { + // This may be a critical error for the Admin Node.js SDK, so log a warning. + // But getToken() may also just have temporarily failed, so we still want to + // continue retrying. 
+ warn(error); + } + closeFn(); + } + }); } } @@ -735,15 +854,14 @@ export class PersistentConnection extends ServerActions { private handleTimestamp_(timestamp: number) { const delta = timestamp - new Date().getTime(); - this.onServerInfoUpdate_({'serverTimeOffset': delta}); + this.onServerInfoUpdate_({ serverTimeOffset: delta }); } private cancelSentTransactions_() { for (let i = 0; i < this.outstandingPuts_.length; i++) { const put = this.outstandingPuts_[i]; - if (put && /*hash*/'h' in put.request && put.queued) { - if (put.onComplete) - put.onComplete('disconnect'); + if (put && /*hash*/ 'h' in put.request && put.queued) { + if (put.onComplete) put.onComplete('disconnect'); delete this.outstandingPuts_[i]; this.outstandingPutCount_--; @@ -751,8 +869,7 @@ export class PersistentConnection extends ServerActions { } // Clean up array occasionally. - if (this.outstandingPutCount_ === 0) - this.outstandingPuts_ = []; + if (this.outstandingPutCount_ === 0) this.outstandingPuts_ = []; } /** @@ -769,8 +886,7 @@ export class PersistentConnection extends ServerActions { queryId = query.map(q => ObjectToUniqueKey(q)).join('$'); } const listen = this.removeListen_(pathString, queryId); - if (listen && listen.onComplete) - listen.onComplete('permission_denied'); + if (listen && listen.onComplete) listen.onComplete('permission_denied'); } /** @@ -839,13 +955,17 @@ export class PersistentConnection extends ServerActions { }); for (let i = 0; i < this.outstandingPuts_.length; i++) { - if (this.outstandingPuts_[i]) - this.sendPut_(i); + if (this.outstandingPuts_[i]) this.sendPut_(i); } while (this.onDisconnectRequestQueue_.length) { const request = this.onDisconnectRequestQueue_.shift(); - this.sendOnDisconnect_(request.action, request.pathString, request.data, request.onComplete); + this.sendOnDisconnect_( + request.action, + request.pathString, + request.data, + request.onComplete + ); } } @@ -863,12 +983,13 @@ export class PersistentConnection extends ServerActions { 
clientName = 'node'; } - stats['sdk.' + clientName + '.' + firebase.SDK_VERSION.replace(/\./g, '-')] = 1; + stats[ + 'sdk.' + clientName + '.' + firebase.SDK_VERSION.replace(/\./g, '-') + ] = 1; if (isMobileCordova()) { stats['framework.cordova'] = 1; - } - else if (isReactNative()) { + } else if (isReactNative()) { stats['framework.reactnative'] = 1; } this.reportStats(stats); @@ -883,4 +1004,3 @@ export class PersistentConnection extends ServerActions { return isEmpty(this.interruptReasons_) && online; } } - diff --git a/src/database/core/ReadonlyRestClient.ts b/src/database/core/ReadonlyRestClient.ts index 70205919b8d..8176e2f57cc 100644 --- a/src/database/core/ReadonlyRestClient.ts +++ b/src/database/core/ReadonlyRestClient.ts @@ -30,9 +30,7 @@ import { Query } from '../api/Query'; * persistent connection (using WebSockets or long-polling) */ export class ReadonlyRestClient extends ServerActions { - reportStats(stats: { - [k: string]: any; - }): void { + reportStats(stats: { [k: string]: any }): void { throw new Error('Method not implemented.'); } @@ -57,7 +55,10 @@ export class ReadonlyRestClient extends ServerActions { if (tag !== undefined) { return 'tag$' + tag; } else { - assert(query.getQueryParams().isDefault(), 'should have a tag if it\'s not a default query.'); + assert( + query.getQueryParams().isDefault(), + "should have a tag if it's not a default query." 
+ ); return query.path.toString(); } } @@ -68,49 +69,69 @@ export class ReadonlyRestClient extends ServerActions { * @param {AuthTokenProvider} authTokenProvider_ * @implements {ServerActions} */ - constructor(private repoInfo_: RepoInfo, - private onDataUpdate_: (a: string, b: any, c: boolean, d: number | null) => void, - private authTokenProvider_: AuthTokenProvider) { + constructor( + private repoInfo_: RepoInfo, + private onDataUpdate_: ( + a: string, + b: any, + c: boolean, + d: number | null + ) => void, + private authTokenProvider_: AuthTokenProvider + ) { super(); } /** @inheritDoc */ - listen(query: Query, currentHashFn: () => string, tag: number | null, onComplete: (a: string, b: any) => void) { + listen( + query: Query, + currentHashFn: () => string, + tag: number | null, + onComplete: (a: string, b: any) => void + ) { const pathString = query.path.toString(); - this.log_('Listen called for ' + pathString + ' ' + query.queryIdentifier()); + this.log_( + 'Listen called for ' + pathString + ' ' + query.queryIdentifier() + ); // Mark this listener so we can tell if it's removed. 
const listenId = ReadonlyRestClient.getListenId_(query, tag); const thisListen = {}; this.listens_[listenId] = thisListen; - const queryStringParamaters = query.getQueryParams().toRestQueryStringParameters(); + const queryStringParamaters = query + .getQueryParams() + .toRestQueryStringParameters(); - this.restRequest_(pathString + '.json', queryStringParamaters, (error, result) => { - let data = result; + this.restRequest_( + pathString + '.json', + queryStringParamaters, + (error, result) => { + let data = result; - if (error === 404) { - data = null; - error = null; - } - - if (error === null) { - this.onDataUpdate_(pathString, data, /*isMerge=*/false, tag); - } + if (error === 404) { + data = null; + error = null; + } - if (safeGet(this.listens_, listenId) === thisListen) { - let status; - if (!error) { - status = 'ok'; - } else if (error == 401) { - status = 'permission_denied'; - } else { - status = 'rest_error:' + error; + if (error === null) { + this.onDataUpdate_(pathString, data, /*isMerge=*/ false, tag); } - onComplete(status, null); + if (safeGet(this.listens_, listenId) === thisListen) { + let status; + if (!error) { + status = 'ok'; + } else if (error == 401) { + status = 'permission_denied'; + } else { + status = 'rest_error:' + error; + } + + onComplete(status, null); + } } - }); + ); } /** @inheritDoc */ @@ -133,48 +154,69 @@ export class ReadonlyRestClient extends ServerActions { * @param {?function(?number, *=)} callback * @private */ - private restRequest_(pathString: string, queryStringParameters: { [k: string]: any } = {}, - callback: ((a: number | null, b?: any) => void) | null) { + private restRequest_( + pathString: string, + queryStringParameters: { [k: string]: any } = {}, + callback: ((a: number | null, b?: any) => void) | null + ) { queryStringParameters['format'] = 'export'; - this.authTokenProvider_.getToken(/*forceRefresh=*/false).then((authTokenData) => { - const authToken = authTokenData && authTokenData.accessToken; - if 
(authToken) { - queryStringParameters['auth'] = authToken; - } + this.authTokenProvider_ + .getToken(/*forceRefresh=*/ false) + .then(authTokenData => { + const authToken = authTokenData && authTokenData.accessToken; + if (authToken) { + queryStringParameters['auth'] = authToken; + } - const url = (this.repoInfo_.secure ? 'https://' : 'http://') + - this.repoInfo_.host + - pathString + - '?' + - querystring(queryStringParameters); - - this.log_('Sending REST request for ' + url); - const xhr = new XMLHttpRequest(); - xhr.onreadystatechange = () => { - if (callback && xhr.readyState === 4) { - this.log_('REST Response for ' + url + ' received. status:', xhr.status, 'response:', xhr.responseText); - let res = null; - if (xhr.status >= 200 && xhr.status < 300) { - try { - res = jsonEval(xhr.responseText); - } catch (e) { - warn('Failed to parse JSON response for ' + url + ': ' + xhr.responseText); + const url = + (this.repoInfo_.secure ? 'https://' : 'http://') + + this.repoInfo_.host + + pathString + + '?' + + querystring(queryStringParameters); + + this.log_('Sending REST request for ' + url); + const xhr = new XMLHttpRequest(); + xhr.onreadystatechange = () => { + if (callback && xhr.readyState === 4) { + this.log_( + 'REST Response for ' + url + ' received. status:', + xhr.status, + 'response:', + xhr.responseText + ); + let res = null; + if (xhr.status >= 200 && xhr.status < 300) { + try { + res = jsonEval(xhr.responseText); + } catch (e) { + warn( + 'Failed to parse JSON response for ' + + url + + ': ' + + xhr.responseText + ); + } + callback(null, res); + } else { + // 401 and 404 are expected. + if (xhr.status !== 401 && xhr.status !== 404) { + warn( + 'Got unsuccessful REST response for ' + + url + + ' Status: ' + + xhr.status + ); + } + callback(xhr.status); } - callback(null, res); - } else { - // 401 and 404 are expected. 
- if (xhr.status !== 401 && xhr.status !== 404) { - warn('Got unsuccessful REST response for ' + url + ' Status: ' + xhr.status); - } - callback(xhr.status); + callback = null; } - callback = null; - } - }; + }; - xhr.open('GET', url, /*asynchronous=*/true); - xhr.send(); - }); + xhr.open('GET', url, /*asynchronous=*/ true); + xhr.send(); + }); } } diff --git a/src/database/core/Repo.ts b/src/database/core/Repo.ts index 982018369dd..4df142a861a 100644 --- a/src/database/core/Repo.ts +++ b/src/database/core/Repo.ts @@ -64,7 +64,9 @@ export class Repo { private infoData_: SnapshotHolder; private abortTransactions_: (path: Path) => Path; private rerunTransactions_: (changedPath: Path) => Path; - private interceptServerDataCallback_: ((a: string, b: any) => void) | null = null; + private interceptServerDataCallback_: + | ((a: string, b: any) => void) + | null = null; private __database: Database; // A list of data pieces and paths to be set when this client disconnects. @@ -81,16 +83,22 @@ export class Repo { * @param {boolean} forceRestClient * @param {!FirebaseApp} app */ - constructor(private repoInfo_: RepoInfo, forceRestClient: boolean, public app: FirebaseApp) { + constructor( + private repoInfo_: RepoInfo, + forceRestClient: boolean, + public app: FirebaseApp + ) { /** @type {!AuthTokenProvider} */ const authTokenProvider = new AuthTokenProvider(app); this.stats_ = StatsManager.getCollection(repoInfo_); if (forceRestClient || beingCrawled()) { - this.server_ = new ReadonlyRestClient(this.repoInfo_, + this.server_ = new ReadonlyRestClient( + this.repoInfo_, this.onDataUpdate_.bind(this), - authTokenProvider); + authTokenProvider + ); // Minor hack: Fire onConnect immediately, since there's no actual connection. 
setTimeout(this.onConnectStatus_.bind(this, true), 0); @@ -99,7 +107,9 @@ export class Repo { // Validate authOverride if (typeof authOverride !== 'undefined' && authOverride !== null) { if (typeof authOverride !== 'object') { - throw new Error('Only objects are supported for option databaseAuthVariableOverride'); + throw new Error( + 'Only objects are supported for option databaseAuthVariableOverride' + ); } try { stringify(authOverride); @@ -108,24 +118,28 @@ export class Repo { } } - this.persistentConnection_ = new PersistentConnection(this.repoInfo_, + this.persistentConnection_ = new PersistentConnection( + this.repoInfo_, this.onDataUpdate_.bind(this), this.onConnectStatus_.bind(this), this.onServerInfoUpdate_.bind(this), authTokenProvider, - authOverride); + authOverride + ); this.server_ = this.persistentConnection_; } - authTokenProvider.addTokenChangeListener((token) => { + authTokenProvider.addTokenChangeListener(token => { this.server_.refreshAuthToken(token); }); // In the case of multiple Repos for the same repoInfo (i.e. there are multiple Firebase.Contexts being used), // we only want to create one StatsReporter. As such, we'll report stats over the first Repo created. - this.statsReporter_ = StatsManager.getOrCreateReporter(repoInfo_, - () => new StatsReporter(this.stats_, this.server_)); + this.statsReporter_ = StatsManager.getOrCreateReporter( + repoInfo_, + () => new StatsReporter(this.stats_, this.server_) + ); this.transactions_init_(); @@ -138,7 +152,10 @@ export class Repo { // This is possibly a hack, but we have different semantics for .info endpoints. We don't raise null events // on initial data... if (!node.isEmpty()) { - infoEvents = this.infoSyncTree_.applyServerOverwrite(query.path, node); + infoEvents = this.infoSyncTree_.applyServerOverwrite( + query.path, + node + ); setTimeout(() => { onComplete('ok'); }, 0); @@ -168,7 +185,9 @@ export class Repo { * @return {string} The URL corresponding to the root of this Firebase. 
*/ toString(): string { - return (this.repoInfo_.secure ? 'https://' : 'http://') + this.repoInfo_.host; + return ( + (this.repoInfo_.secure ? 'https://' : 'http://') + this.repoInfo_.host + ); } /** @@ -182,7 +201,9 @@ export class Repo { * @return {!number} The time in milliseconds, taking the server offset into account if we have one. */ serverTime(): number { - const offsetNode = this.infoData_.getNode(new Path('.info/serverTimeOffset')); + const offsetNode = this.infoData_.getNode( + new Path('.info/serverTimeOffset') + ); const offset = (offsetNode.val() as number) || 0; return new Date().getTime() + offset; } @@ -193,7 +214,7 @@ export class Repo { */ generateServerValues(): Object { return generateWithValues({ - 'timestamp': this.serverTime() + timestamp: this.serverTime() }); } @@ -206,22 +227,41 @@ export class Repo { * @param {boolean} isMerge * @param {?number} tag */ - private onDataUpdate_(pathString: string, data: any, isMerge: boolean, tag: number | null) { + private onDataUpdate_( + pathString: string, + data: any, + isMerge: boolean, + tag: number | null + ) { // For testing. this.dataUpdateCount++; const path = new Path(pathString); - data = this.interceptServerDataCallback_ ? this.interceptServerDataCallback_(pathString, data) : data; + data = this.interceptServerDataCallback_ + ? 
this.interceptServerDataCallback_(pathString, data) + : data; let events = []; if (tag) { if (isMerge) { - const taggedChildren = map(data as { [k: string]: any }, (raw: any) => nodeFromJSON(raw)); - events = this.serverSyncTree_.applyTaggedQueryMerge(path, taggedChildren, tag); + const taggedChildren = map(data as { [k: string]: any }, (raw: any) => + nodeFromJSON(raw) + ); + events = this.serverSyncTree_.applyTaggedQueryMerge( + path, + taggedChildren, + tag + ); } else { const taggedSnap = nodeFromJSON(data); - events = this.serverSyncTree_.applyTaggedQueryOverwrite(path, taggedSnap, tag); + events = this.serverSyncTree_.applyTaggedQueryOverwrite( + path, + taggedSnap, + tag + ); } } else if (isMerge) { - const changedChildren = map(data as { [k: string]: any }, (raw: any) => nodeFromJSON(raw)); + const changedChildren = map(data as { [k: string]: any }, (raw: any) => + nodeFromJSON(raw) + ); events = this.serverSyncTree_.applyServerMerge(path, changedChildren); } else { const snap = nodeFromJSON(data); @@ -294,30 +334,52 @@ export class Repo { * @param {number|string|null} newPriority * @param {?function(?Error, *=)} onComplete */ - setWithPriority(path: Path, newVal: any, - newPriority: number | string | null, - onComplete: ((status: Error | null, errorReason?: string) => void) | null) { - this.log_('set', {path: path.toString(), value: newVal, priority: newPriority}); + setWithPriority( + path: Path, + newVal: any, + newPriority: number | string | null, + onComplete: ((status: Error | null, errorReason?: string) => void) | null + ) { + this.log_('set', { + path: path.toString(), + value: newVal, + priority: newPriority + }); // TODO: Optimize this behavior to either (a) store flag to skip resolving where possible and / or // (b) store unresolved paths on JSON parse const serverValues = this.generateServerValues(); const newNodeUnresolved = nodeFromJSON(newVal, newPriority); - const newNode = resolveDeferredValueSnapshot(newNodeUnresolved, serverValues); + 
const newNode = resolveDeferredValueSnapshot( + newNodeUnresolved, + serverValues + ); const writeId = this.getNextWriteId_(); - const events = this.serverSyncTree_.applyUserOverwrite(path, newNode, writeId, true); + const events = this.serverSyncTree_.applyUserOverwrite( + path, + newNode, + writeId, + true + ); this.eventQueue_.queueEvents(events); - this.server_.put(path.toString(), newNodeUnresolved.val(/*export=*/true), (status, errorReason) => { - const success = status === 'ok'; - if (!success) { - warn('set at ' + path + ' failed: ' + status); - } + this.server_.put( + path.toString(), + newNodeUnresolved.val(/*export=*/ true), + (status, errorReason) => { + const success = status === 'ok'; + if (!success) { + warn('set at ' + path + ' failed: ' + status); + } - const clearEvents = this.serverSyncTree_.ackUserWrite(writeId, !success); - this.eventQueue_.raiseEventsForChangedPath(path, clearEvents); - this.callOnCompleteCallback(onComplete, status, errorReason); - }); + const clearEvents = this.serverSyncTree_.ackUserWrite( + writeId, + !success + ); + this.eventQueue_.raiseEventsForChangedPath(path, clearEvents); + this.callOnCompleteCallback(onComplete, status, errorReason); + } + ); const affectedPath = this.abortTransactions_(path); this.rerunTransactions_(affectedPath); // We queued the events above, so just flush the queue here @@ -329,9 +391,12 @@ export class Repo { * @param {!Object} childrenToMerge * @param {?function(?Error, *=)} onComplete */ - update(path: Path, childrenToMerge: { [k: string]: any }, - onComplete: ((status: Error | null, errorReason?: string) => void) | null) { - this.log_('update', {path: path.toString(), value: childrenToMerge}); + update( + path: Path, + childrenToMerge: { [k: string]: any }, + onComplete: ((status: Error | null, errorReason?: string) => void) | null + ) { + this.log_('update', { path: path.toString(), value: childrenToMerge }); // Start with our existing data and merge each child into it. 
let empty = true; @@ -340,24 +405,39 @@ export class Repo { forEach(childrenToMerge, (changedKey: string, changedValue: any) => { empty = false; const newNodeUnresolved = nodeFromJSON(changedValue); - changedChildren[changedKey] = resolveDeferredValueSnapshot(newNodeUnresolved, serverValues); + changedChildren[changedKey] = resolveDeferredValueSnapshot( + newNodeUnresolved, + serverValues + ); }); if (!empty) { const writeId = this.getNextWriteId_(); - const events = this.serverSyncTree_.applyUserMerge(path, changedChildren, writeId); + const events = this.serverSyncTree_.applyUserMerge( + path, + changedChildren, + writeId + ); this.eventQueue_.queueEvents(events); - this.server_.merge(path.toString(), childrenToMerge, (status, errorReason) => { - const success = status === 'ok'; - if (!success) { - warn('update at ' + path + ' failed: ' + status); + this.server_.merge( + path.toString(), + childrenToMerge, + (status, errorReason) => { + const success = status === 'ok'; + if (!success) { + warn('update at ' + path + ' failed: ' + status); + } + + const clearEvents = this.serverSyncTree_.ackUserWrite( + writeId, + !success + ); + const affectedPath = + clearEvents.length > 0 ? this.rerunTransactions_(path) : path; + this.eventQueue_.raiseEventsForChangedPath(affectedPath, clearEvents); + this.callOnCompleteCallback(onComplete, status, errorReason); } - - const clearEvents = this.serverSyncTree_.ackUserWrite(writeId, !success); - const affectedPath = (clearEvents.length > 0) ? 
this.rerunTransactions_(path) : path; - this.eventQueue_.raiseEventsForChangedPath(affectedPath, clearEvents); - this.callOnCompleteCallback(onComplete, status, errorReason); - }); + ); forEach(childrenToMerge, (changedPath: string) => { const affectedPath = this.abortTransactions_(path.child(changedPath)); @@ -367,7 +447,7 @@ export class Repo { // We queued the events above, so just flush the queue here this.eventQueue_.raiseEventsForChangedPath(path, []); } else { - log('update() called with empty data. Don\'t do anything.'); + log("update() called with empty data. Don't do anything."); this.callOnCompleteCallback(onComplete, 'ok'); } } @@ -380,11 +460,16 @@ export class Repo { this.log_('onDisconnectEvents'); const serverValues = this.generateServerValues(); - const resolvedOnDisconnectTree = resolveDeferredValueTree(this.onDisconnect_, serverValues); + const resolvedOnDisconnectTree = resolveDeferredValueTree( + this.onDisconnect_, + serverValues + ); let events: Event[] = []; resolvedOnDisconnectTree.forEachTree(Path.Empty, (path, snap) => { - events = events.concat(this.serverSyncTree_.applyServerOverwrite(path, snap)); + events = events.concat( + this.serverSyncTree_.applyServerOverwrite(path, snap) + ); const affectedPath = this.abortTransactions_(path); this.rerunTransactions_(affectedPath); }); @@ -397,7 +482,10 @@ export class Repo { * @param {!Path} path * @param {?function(?Error, *=)} onComplete */ - onDisconnectCancel(path: Path, onComplete: ((status: Error | null, errorReason?: string) => void) | null) { + onDisconnectCancel( + path: Path, + onComplete: ((status: Error | null, errorReason?: string) => void) | null + ) { this.server_.onDisconnectCancel(path.toString(), (status, errorReason) => { if (status === 'ok') { this.onDisconnect_.forget(path); @@ -411,14 +499,22 @@ export class Repo { * @param {*} value * @param {?function(?Error, *=)} onComplete */ - onDisconnectSet(path: Path, value: any, onComplete: ((status: Error | null, errorReason?: 
string) => void) | null) { + onDisconnectSet( + path: Path, + value: any, + onComplete: ((status: Error | null, errorReason?: string) => void) | null + ) { const newNode = nodeFromJSON(value); - this.server_.onDisconnectPut(path.toString(), newNode.val(/*export=*/true), (status, errorReason) => { - if (status === 'ok') { - this.onDisconnect_.remember(path, newNode); + this.server_.onDisconnectPut( + path.toString(), + newNode.val(/*export=*/ true), + (status, errorReason) => { + if (status === 'ok') { + this.onDisconnect_.remember(path, newNode); + } + this.callOnCompleteCallback(onComplete, status, errorReason); } - this.callOnCompleteCallback(onComplete, status, errorReason); - }); + ); } /** @@ -427,14 +523,23 @@ export class Repo { * @param {*} priority * @param {?function(?Error, *=)} onComplete */ - onDisconnectSetWithPriority(path: Path, value: any, priority: any, onComplete: ((status: Error | null, errorReason?: string) => void) | null) { + onDisconnectSetWithPriority( + path: Path, + value: any, + priority: any, + onComplete: ((status: Error | null, errorReason?: string) => void) | null + ) { const newNode = nodeFromJSON(value, priority); - this.server_.onDisconnectPut(path.toString(), newNode.val(/*export=*/true), (status, errorReason) => { - if (status === 'ok') { - this.onDisconnect_.remember(path, newNode); + this.server_.onDisconnectPut( + path.toString(), + newNode.val(/*export=*/ true), + (status, errorReason) => { + if (status === 'ok') { + this.onDisconnect_.remember(path, newNode); + } + this.callOnCompleteCallback(onComplete, status, errorReason); } - this.callOnCompleteCallback(onComplete, status, errorReason); - }); + ); } /** @@ -442,23 +547,32 @@ export class Repo { * @param {*} childrenToMerge * @param {?function(?Error, *=)} onComplete */ - onDisconnectUpdate(path: Path, childrenToMerge: { [k: string]: any }, - onComplete: ((status: Error | null, errorReason?: string) => void) | null) { + onDisconnectUpdate( + path: Path, + 
childrenToMerge: { [k: string]: any }, + onComplete: ((status: Error | null, errorReason?: string) => void) | null + ) { if (isEmpty(childrenToMerge)) { - log('onDisconnect().update() called with empty data. Don\'t do anything.'); + log( + "onDisconnect().update() called with empty data. Don't do anything." + ); this.callOnCompleteCallback(onComplete, 'ok'); return; } - this.server_.onDisconnectMerge(path.toString(), childrenToMerge, (status, errorReason) => { - if (status === 'ok') { - forEach(childrenToMerge, (childName: string, childNode: any) => { - const newChildNode = nodeFromJSON(childNode); - this.onDisconnect_.remember(path.child(childName), newChildNode); - }); + this.server_.onDisconnectMerge( + path.toString(), + childrenToMerge, + (status, errorReason) => { + if (status === 'ok') { + forEach(childrenToMerge, (childName: string, childNode: any) => { + const newChildNode = nodeFromJSON(childNode); + this.onDisconnect_.remember(path.child(childName), newChildNode); + }); + } + this.callOnCompleteCallback(onComplete, status, errorReason); } - this.callOnCompleteCallback(onComplete, status, errorReason); - }); + ); } /** @@ -468,9 +582,15 @@ export class Repo { addEventCallbackForQuery(query: Query, eventRegistration: EventRegistration) { let events; if (query.path.getFront() === '.info') { - events = this.infoSyncTree_.addEventRegistration(query, eventRegistration); + events = this.infoSyncTree_.addEventRegistration( + query, + eventRegistration + ); } else { - events = this.serverSyncTree_.addEventRegistration(query, eventRegistration); + events = this.serverSyncTree_.addEventRegistration( + query, + eventRegistration + ); } this.eventQueue_.raiseEventsAtPath(query.path, events); } @@ -479,14 +599,23 @@ export class Repo { * @param {!Query} query * @param {?EventRegistration} eventRegistration */ - removeEventCallbackForQuery(query: Query, eventRegistration: EventRegistration) { + removeEventCallbackForQuery( + query: Query, + eventRegistration: 
EventRegistration + ) { // These are guaranteed not to raise events, since we're not passing in a cancelError. However, we can future-proof // a little bit by handling the return values anyways. let events; if (query.path.getFront() === '.info') { - events = this.infoSyncTree_.removeEventRegistration(query, eventRegistration); + events = this.infoSyncTree_.removeEventRegistration( + query, + eventRegistration + ); } else { - events = this.serverSyncTree_.removeEventRegistration(query, eventRegistration); + events = this.serverSyncTree_.removeEventRegistration( + query, + eventRegistration + ); } this.eventQueue_.raiseEventsAtPath(query.path, events); } @@ -504,8 +633,7 @@ export class Repo { } stats(showDelta: boolean = false) { - if (typeof console === 'undefined') - return; + if (typeof console === 'undefined') return; let stats: { [k: string]: any }; if (showDelta) { @@ -517,12 +645,14 @@ export class Repo { } const longestName = Object.keys(stats).reduce( - (previousValue, currentValue) => Math.max(currentValue.length, previousValue), 0); + (previousValue, currentValue) => + Math.max(currentValue.length, previousValue), + 0 + ); forEach(stats, (stat: string, value: any) => { // pad stat names to be the same length (plus 2 extra spaces). 
- for (let i = stat.length; i < longestName + 2; i++) - stat += ' '; + for (let i = stat.length; i < longestName + 2; i++) stat += ' '; console.log(stat + value); }); } @@ -549,17 +679,19 @@ export class Repo { * @param {!string} status * @param {?string=} errorReason */ - callOnCompleteCallback(callback: ((status: Error | null, errorReason?: string) => void) | null, - status: string, errorReason?: string | null) { + callOnCompleteCallback( + callback: ((status: Error | null, errorReason?: string) => void) | null, + status: string, + errorReason?: string | null + ) { if (callback) { - exceptionGuard(function () { + exceptionGuard(function() { if (status == 'ok') { callback(null); } else { const code = (status || 'error').toUpperCase(); let message = code; - if (errorReason) - message += ': ' + errorReason; + if (errorReason) message += ': ' + errorReason; const error = new Error(message); (error as any).code = code; @@ -573,4 +705,3 @@ export class Repo { return this.__database || (this.__database = new Database(this)); } } - diff --git a/src/database/core/RepoInfo.ts b/src/database/core/RepoInfo.ts index 20acdd25c32..dcd498ef35b 100644 --- a/src/database/core/RepoInfo.ts +++ b/src/database/core/RepoInfo.ts @@ -19,7 +19,6 @@ import { forEach } from '../../utils/obj'; import { PersistentStorage } from './storage/storage'; import { LONG_POLLING, WEBSOCKET } from '../realtime/Constants'; - /** * A class that holds metadata about a Repo object * @@ -37,8 +36,13 @@ export class RepoInfo { * @param {boolean} webSocketOnly Whether to prefer websockets over all other transports (used by Nest). 
* @param {string=} persistenceKey Override the default session persistence storage key */ - constructor(host: string,public secure: boolean, public namespace: string, - public webSocketOnly: boolean, public persistenceKey: string = '') { + constructor( + host: string, + public secure: boolean, + public namespace: string, + public webSocketOnly: boolean, + public persistenceKey: string = '' + ) { this.host = host.toLowerCase(); this.domain = this.host.substr(this.host.indexOf('.') + 1); this.internalHost = PersistentStorage.get('host:' + host) || this.host; @@ -57,7 +61,9 @@ export class RepoInfo { } isCustomHost() { - return this.domain !== 'firebaseio.com' && this.domain !== 'firebaseio-demo.com'; + return ( + this.domain !== 'firebaseio.com' && this.domain !== 'firebaseio-demo.com' + ); } updateHost(newHost: string) { @@ -81,9 +87,11 @@ export class RepoInfo { let connURL: string; if (type === WEBSOCKET) { - connURL = (this.secure ? 'wss://' : 'ws://') + this.internalHost + '/.ws?'; + connURL = + (this.secure ? 'wss://' : 'ws://') + this.internalHost + '/.ws?'; } else if (type === LONG_POLLING) { - connURL = (this.secure ? 'https://' : 'http://') + this.internalHost + '/.lp?'; + connURL = + (this.secure ? 'https://' : 'http://') + this.internalHost + '/.lp?'; } else { throw new Error('Unknown connection type: ' + type); } diff --git a/src/database/core/RepoManager.ts b/src/database/core/RepoManager.ts index e1fbd2e7950..1598096f961 100644 --- a/src/database/core/RepoManager.ts +++ b/src/database/core/RepoManager.ts @@ -14,13 +14,13 @@ * limitations under the License. 
*/ -import { FirebaseApp } from "../../app/firebase_app"; -import { safeGet } from "../../utils/obj"; -import { Repo } from "./Repo"; -import { fatal } from "./util/util"; -import { parseRepoInfo } from "./util/libs/parser"; -import { validateUrl } from "./util/validation"; -import "./Repo_transaction"; +import { FirebaseApp } from '../../app/firebase_app'; +import { safeGet } from '../../utils/obj'; +import { Repo } from './Repo'; +import { fatal } from './util/util'; +import { parseRepoInfo } from './util/libs/parser'; +import { validateUrl } from './util/validation'; +import './Repo_transaction'; import { Database } from '../api/Database'; import { RepoInfo } from './RepoInfo'; @@ -37,7 +37,7 @@ export class RepoManager { * @private {!Object.} */ private repos_: { - [name: string]: Repo + [name: string]: Repo; } = {}; /** @@ -75,9 +75,11 @@ export class RepoManager { databaseFromApp(app: FirebaseApp): Database { const dbUrl: string = app.options[DATABASE_URL_OPTION]; if (dbUrl === undefined) { - fatal("Can't determine Firebase Database URL. Be sure to include " + - DATABASE_URL_OPTION + - " option when calling firebase.intializeApp()."); + fatal( + "Can't determine Firebase Database URL. Be sure to include " + + DATABASE_URL_OPTION + + ' option when calling firebase.intializeApp().' + ); } const parsedUrl = parseRepoInfo(dbUrl); @@ -85,8 +87,10 @@ export class RepoManager { validateUrl('Invalid Firebase Database URL', 1, parsedUrl); if (!parsedUrl.path.isEmpty()) { - fatal("Database URL must point to the root of a Firebase Database " + - "(not including a child path)."); + fatal( + 'Database URL must point to the root of a Firebase Database ' + + '(not including a child path).' + ); } const repo = this.createRepo(repoInfo, app); @@ -100,10 +104,9 @@ export class RepoManager { * @param {!Repo} repo */ deleteRepo(repo: Repo) { - // This should never happen... 
if (safeGet(this.repos_, repo.app.name) !== repo) { - fatal("Database " + repo.app.name + " has already been deleted."); + fatal('Database ' + repo.app.name + ' has already been deleted.'); } repo.interrupt(); delete this.repos_[repo.app.name]; @@ -135,4 +138,4 @@ export class RepoManager { forceRestClient(forceRestClient: boolean) { this.useRestClient_ = forceRestClient; } -} \ No newline at end of file +} diff --git a/src/database/core/Repo_transaction.ts b/src/database/core/Repo_transaction.ts index 757e4a2b65b..87bfd76f215 100644 --- a/src/database/core/Repo_transaction.ts +++ b/src/database/core/Repo_transaction.ts @@ -21,11 +21,7 @@ import { Path } from './util/Path'; import { Tree } from './util/Tree'; import { PRIORITY_INDEX } from './snap/indexes/PriorityIndex'; import { Node } from './snap/Node'; -import { - LUIDGenerator, - warn, - exceptionGuard, -} from './util/util'; +import { LUIDGenerator, warn, exceptionGuard } from './util/util'; import { resolveDeferredValueSnapshot } from './util/ServerValues'; import { isValidPriority, validateFirebaseData } from './util/validation'; import { contains, safeGet } from '../../utils/obj'; @@ -48,19 +44,19 @@ export enum TransactionStatus { // mismatched hash. RUN, - // We've run the transaction and sent it to the server and it's currently outstanding (hasn't come back as accepted - // or rejected yet). + // We've run the transaction and sent it to the server and it's currently outstanding (hasn't come back as accepted + // or rejected yet). SENT, - // Temporary state used to mark completed transactions (whether successful or aborted). The transaction will be - // removed when we get a chance to prune completed ones. + // Temporary state used to mark completed transactions (whether successful or aborted). The transaction will be + // removed when we get a chance to prune completed ones. COMPLETED, - // Used when an already-sent transaction needs to be aborted (e.g. due to a conflicting set() call that was made). 
- // If it comes back as unsuccessful, we'll abort it. + // Used when an already-sent transaction needs to be aborted (e.g. due to a conflicting set() call that was made). + // If it comes back as unsuccessful, we'll abort it. SENT_NEEDS_ABORT, - // Temporary state used to mark transactions that need to be aborted. + // Temporary state used to mark transactions that need to be aborted. NEEDS_ABORT } @@ -104,13 +100,13 @@ type Transaction = { currentInputSnapshot: Node | null; currentOutputSnapshotRaw: Node | null; currentOutputSnapshotResolved: Node | null; -} +}; /** * Setup the transaction data structures * @private */ -(Repo.prototype as any).transactions_init_ = function () { +(Repo.prototype as any).transactions_init_ = function() { /** * Stores queues of outstanding transactions for Firebase locations. * @@ -122,10 +118,12 @@ type Transaction = { declare module './Repo' { interface Repo { - startTransaction(path: Path, - transactionUpdate: (a: any) => void, - onComplete: ((a: Error, b: boolean, c: DataSnapshot) => void) | null, - applyLocally: boolean): void + startTransaction( + path: Path, + transactionUpdate: (a: any) => void, + onComplete: ((a: Error, b: boolean, c: DataSnapshot) => void) | null, + applyLocally: boolean + ): void; } } @@ -137,17 +135,21 @@ declare module './Repo' { * @param {?function(?Error, boolean, ?DataSnapshot)} onComplete Completion callback. * @param {boolean} applyLocally Whether or not to make intermediate results visible */ -Repo.prototype.startTransaction = function (path: Path, - transactionUpdate: (a: any) => any, - onComplete: ((a: Error, b: boolean, c: DataSnapshot) => void) | null, - applyLocally: boolean) { +Repo.prototype.startTransaction = function( + path: Path, + transactionUpdate: (a: any) => any, + onComplete: ((a: Error, b: boolean, c: DataSnapshot) => void) | null, + applyLocally: boolean +) { this.log_('transaction on ' + path); // Add a watch to make sure we get server updates. 
- const valueCallback = function () { }; + const valueCallback = function() {}; const watchRef = new Reference(this, path); watchRef.on('value', valueCallback); - const unwatcher = function () { watchRef.off('value', valueCallback); }; + const unwatcher = function() { + watchRef.off('value', valueCallback); + }; // Initialize transaction. const transaction: Transaction = { @@ -182,7 +184,6 @@ Repo.prototype.startTransaction = function (path: Path, currentOutputSnapshotResolved: null }; - // Run transaction initially. const currentState = this.getLatestState_(path); transaction.currentInputSnapshot = currentState; @@ -194,11 +195,19 @@ Repo.prototype.startTransaction = function (path: Path, transaction.currentOutputSnapshotResolved = null; if (transaction.onComplete) { // We just set the input snapshot, so this cast should be safe - const snapshot = new DataSnapshot(transaction.currentInputSnapshot, new Reference(this, transaction.path), PRIORITY_INDEX); + const snapshot = new DataSnapshot( + transaction.currentInputSnapshot, + new Reference(this, transaction.path), + PRIORITY_INDEX + ); transaction.onComplete(null, false, snapshot); } } else { - validateFirebaseData('transaction failed: Data returned ', newVal, transaction.path); + validateFirebaseData( + 'transaction failed: Data returned ', + newVal, + transaction.path + ); // Mark as run and add to our queue. transaction.status = TransactionStatus.RUN; @@ -212,24 +221,41 @@ Repo.prototype.startTransaction = function (path: Path, // Note: We intentionally raise events after updating all of our transaction state, since the user could // start new transactions from the event callbacks. 
let priorityForNode; - if (typeof newVal === 'object' && newVal !== null && contains(newVal, '.priority')) { + if ( + typeof newVal === 'object' && + newVal !== null && + contains(newVal, '.priority') + ) { priorityForNode = safeGet(newVal, '.priority'); - assert(isValidPriority(priorityForNode), 'Invalid priority returned by transaction. ' + - 'Priority must be a valid string, finite number, server value, or null.'); + assert( + isValidPriority(priorityForNode), + 'Invalid priority returned by transaction. ' + + 'Priority must be a valid string, finite number, server value, or null.' + ); } else { - const currentNode = this.serverSyncTree_.calcCompleteEventCache(path) || ChildrenNode.EMPTY_NODE; + const currentNode = + this.serverSyncTree_.calcCompleteEventCache(path) || + ChildrenNode.EMPTY_NODE; priorityForNode = currentNode.getPriority().val(); } - priorityForNode = /** @type {null|number|string} */ (priorityForNode); + priorityForNode /** @type {null|number|string} */ = priorityForNode; const serverValues = this.generateServerValues(); const newNodeUnresolved = nodeFromJSON(newVal, priorityForNode); - const newNode = resolveDeferredValueSnapshot(newNodeUnresolved, serverValues); + const newNode = resolveDeferredValueSnapshot( + newNodeUnresolved, + serverValues + ); transaction.currentOutputSnapshotRaw = newNodeUnresolved; transaction.currentOutputSnapshotResolved = newNode; transaction.currentWriteId = this.getNextWriteId_(); - const events = this.serverSyncTree_.applyUserOverwrite(path, newNode, transaction.currentWriteId, transaction.applyLocally); + const events = this.serverSyncTree_.applyUserOverwrite( + path, + newNode, + transaction.currentWriteId, + transaction.applyLocally + ); this.eventQueue_.raiseEventsForChangedPath(path, events); this.sendReadyTransactions_(); @@ -242,11 +268,16 @@ Repo.prototype.startTransaction = function (path: Path, * @return {Node} * @private */ -(Repo.prototype as any).getLatestState_ = function (path: Path, excludeSets?: 
number[]): Node { - return this.serverSyncTree_.calcCompleteEventCache(path, excludeSets) || ChildrenNode.EMPTY_NODE; +(Repo.prototype as any).getLatestState_ = function( + path: Path, + excludeSets?: number[] +): Node { + return ( + this.serverSyncTree_.calcCompleteEventCache(path, excludeSets) || + ChildrenNode.EMPTY_NODE + ); }; - /** * Sends any already-run transactions that aren't waiting for outstanding transactions to * complete. @@ -257,7 +288,9 @@ Repo.prototype.startTransaction = function (path: Path, * @param {Tree.>=} node transactionQueueTree node to start at. * @private */ -(Repo.prototype as any).sendReadyTransactions_ = function (node: Tree = this.transactionQueueTree_) { +(Repo.prototype as any).sendReadyTransactions_ = function( + node: Tree = this.transactionQueueTree_ +) { // Before recursing, make sure any completed transactions are removed. if (!node) { this.pruneCompletedTransactionsBelowNode_(node); @@ -267,20 +300,21 @@ Repo.prototype.startTransaction = function (path: Path, const queue = this.buildTransactionQueue_(node); assert(queue.length > 0, 'Sending zero length transaction queue'); - const allRun = queue.every((transaction: Transaction) => transaction.status === TransactionStatus.RUN); + const allRun = queue.every( + (transaction: Transaction) => transaction.status === TransactionStatus.RUN + ); // If they're all run (and not sent), we can send them. Else, we must wait. if (allRun) { this.sendTransactionQueue_(node.path(), queue); } } else if (node.hasChildren()) { - node.forEachChild((childNode) => { + node.forEachChild(childNode => { this.sendReadyTransactions_(childNode); }); } }; - /** * Given a list of run transactions, send them to the server and then handle the result (success or failure). * @@ -288,79 +322,104 @@ Repo.prototype.startTransaction = function (path: Path, * @param {!Array.} queue Queue of transactions under the specified location. 
* @private */ -(Repo.prototype as any).sendTransactionQueue_ = function (path: Path, queue: Array) { +(Repo.prototype as any).sendTransactionQueue_ = function( + path: Path, + queue: Array +) { // Mark transactions as sent and increment retry count! - const setsToIgnore = queue.map(function (txn) { return txn.currentWriteId; }); + const setsToIgnore = queue.map(function(txn) { + return txn.currentWriteId; + }); const latestState = this.getLatestState_(path, setsToIgnore); let snapToSend = latestState; const latestHash = latestState.hash(); for (let i = 0; i < queue.length; i++) { const txn = queue[i]; - assert(txn.status === TransactionStatus.RUN, - 'tryToSendTransactionQueue_: items in queue should all be run.'); + assert( + txn.status === TransactionStatus.RUN, + 'tryToSendTransactionQueue_: items in queue should all be run.' + ); txn.status = TransactionStatus.SENT; txn.retryCount++; const relativePath = Path.relativePath(path, txn.path); // If we've gotten to this point, the output snapshot must be defined. - snapToSend = snapToSend.updateChild(relativePath, /**@type {!Node} */ (txn.currentOutputSnapshotRaw)); + snapToSend = snapToSend.updateChild( + relativePath /**@type {!Node} */, + txn.currentOutputSnapshotRaw + ); } const dataToSend = snapToSend.val(true); const pathToSend = path; // Send the put. - this.server_.put(pathToSend.toString(), dataToSend, (status: string) => { - this.log_('transaction put response', {path: pathToSend.toString(), status}); - - let events: Event[] = []; - if (status === 'ok') { - // Queue up the callbacks and fire them after cleaning up all of our transaction state, since - // the callback could trigger more transactions or sets. 
- const callbacks = []; - for (let i = 0; i < queue.length; i++) { - queue[i].status = TransactionStatus.COMPLETED; - events = events.concat(this.serverSyncTree_.ackUserWrite(queue[i].currentWriteId)); - if (queue[i].onComplete) { - // We never unset the output snapshot, and given that this transaction is complete, it should be set - const node = queue[i].currentOutputSnapshotResolved as Node; - const ref = new Reference(this, queue[i].path); - const snapshot = new DataSnapshot(node, ref, PRIORITY_INDEX); - callbacks.push(queue[i].onComplete.bind(null, null, true, snapshot)); + this.server_.put( + pathToSend.toString(), + dataToSend, + (status: string) => { + this.log_('transaction put response', { + path: pathToSend.toString(), + status + }); + + let events: Event[] = []; + if (status === 'ok') { + // Queue up the callbacks and fire them after cleaning up all of our transaction state, since + // the callback could trigger more transactions or sets. + const callbacks = []; + for (let i = 0; i < queue.length; i++) { + queue[i].status = TransactionStatus.COMPLETED; + events = events.concat( + this.serverSyncTree_.ackUserWrite(queue[i].currentWriteId) + ); + if (queue[i].onComplete) { + // We never unset the output snapshot, and given that this transaction is complete, it should be set + const node = queue[i].currentOutputSnapshotResolved as Node; + const ref = new Reference(this, queue[i].path); + const snapshot = new DataSnapshot(node, ref, PRIORITY_INDEX); + callbacks.push( + queue[i].onComplete.bind(null, null, true, snapshot) + ); + } + queue[i].unwatcher(); } - queue[i].unwatcher(); - } - // Now remove the completed transactions. - this.pruneCompletedTransactionsBelowNode_(this.transactionQueueTree_.subTree(path)); - // There may be pending transactions that we can now send. - this.sendReadyTransactions_(); + // Now remove the completed transactions. 
+ this.pruneCompletedTransactionsBelowNode_( + this.transactionQueueTree_.subTree(path) + ); + // There may be pending transactions that we can now send. + this.sendReadyTransactions_(); - this.eventQueue_.raiseEventsForChangedPath(path, events); + this.eventQueue_.raiseEventsForChangedPath(path, events); - // Finally, trigger onComplete callbacks. - for (let i = 0; i < callbacks.length; i++) { - exceptionGuard(callbacks[i]); - } - } else { - // transactions are no longer sent. Update their status appropriately. - if (status === 'datastale') { - for (let i = 0; i < queue.length; i++) { - if (queue[i].status === TransactionStatus.SENT_NEEDS_ABORT) - queue[i].status = TransactionStatus.NEEDS_ABORT; - else - queue[i].status = TransactionStatus.RUN; + // Finally, trigger onComplete callbacks. + for (let i = 0; i < callbacks.length; i++) { + exceptionGuard(callbacks[i]); } } else { - warn('transaction at ' + pathToSend.toString() + ' failed: ' + status); - for (let i = 0; i < queue.length; i++) { - queue[i].status = TransactionStatus.NEEDS_ABORT; - queue[i].abortReason = status; + // transactions are no longer sent. Update their status appropriately. + if (status === 'datastale') { + for (let i = 0; i < queue.length; i++) { + if (queue[i].status === TransactionStatus.SENT_NEEDS_ABORT) + queue[i].status = TransactionStatus.NEEDS_ABORT; + else queue[i].status = TransactionStatus.RUN; + } + } else { + warn( + 'transaction at ' + pathToSend.toString() + ' failed: ' + status + ); + for (let i = 0; i < queue.length; i++) { + queue[i].status = TransactionStatus.NEEDS_ABORT; + queue[i].abortReason = status; + } } - } - this.rerunTransactions_(path); - } - }, latestHash); + this.rerunTransactions_(path); + } + }, + latestHash + ); }; /** @@ -375,7 +434,7 @@ Repo.prototype.startTransaction = function (path: Path, * @return {!Path} The rootmost path that was affected by rerunning transactions. 
* @private */ -(Repo.prototype as any).rerunTransactions_ = function (changedPath: Path): Path { +(Repo.prototype as any).rerunTransactions_ = function(changedPath: Path): Path { const rootMostTransactionNode = this.getAncestorTransactionNode_(changedPath); const path = rootMostTransactionNode.path(); @@ -385,7 +444,6 @@ Repo.prototype.startTransaction = function (path: Path, return path; }; - /** * Does all the work of rerunning transactions (as well as cleans up aborted transactions and whatnot). * @@ -393,7 +451,10 @@ Repo.prototype.startTransaction = function (path: Path, * @param {!Path} path The path the queue is for. * @private */ -(Repo.prototype as any).rerunTransactionQueue_ = function (queue: Array, path: Path) { +(Repo.prototype as any).rerunTransactionQueue_ = function( + queue: Array, + path: Path +) { if (queue.length === 0) { return; // Nothing to do! } @@ -403,33 +464,54 @@ Repo.prototype.startTransaction = function (path: Path, const callbacks = []; let events: Event[] = []; // Ignore all of the sets we're going to re-run. - const txnsToRerun = queue.filter(function (q) { return q.status === TransactionStatus.RUN; }); - const setsToIgnore = txnsToRerun.map(function (q) { return q.currentWriteId; }); + const txnsToRerun = queue.filter(function(q) { + return q.status === TransactionStatus.RUN; + }); + const setsToIgnore = txnsToRerun.map(function(q) { + return q.currentWriteId; + }); for (let i = 0; i < queue.length; i++) { const transaction = queue[i]; const relativePath = Path.relativePath(path, transaction.path); - let abortTransaction = false, abortReason; - assert(relativePath !== null, 'rerunTransactionsUnderNode_: relativePath should not be null.'); + let abortTransaction = false, + abortReason; + assert( + relativePath !== null, + 'rerunTransactionsUnderNode_: relativePath should not be null.' 
+ ); if (transaction.status === TransactionStatus.NEEDS_ABORT) { abortTransaction = true; abortReason = transaction.abortReason; - events = events.concat(this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true)); + events = events.concat( + this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true) + ); } else if (transaction.status === TransactionStatus.RUN) { if (transaction.retryCount >= (Repo as any).MAX_TRANSACTION_RETRIES_) { abortTransaction = true; abortReason = 'maxretry'; - events = events.concat(this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true)); + events = events.concat( + this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true) + ); } else { // This code reruns a transaction - const currentNode = this.getLatestState_(transaction.path, setsToIgnore); + const currentNode = this.getLatestState_( + transaction.path, + setsToIgnore + ); transaction.currentInputSnapshot = currentNode; const newData = queue[i].update(currentNode.val()); if (newData !== undefined) { - validateFirebaseData('transaction failed: Data returned ', newData, transaction.path); + validateFirebaseData( + 'transaction failed: Data returned ', + newData, + transaction.path + ); let newDataNode = nodeFromJSON(newData); - const hasExplicitPriority = (typeof newData === 'object' && newData != null && - contains(newData, '.priority')); + const hasExplicitPriority = + typeof newData === 'object' && + newData != null && + contains(newData, '.priority'); if (!hasExplicitPriority) { // Keep the old priority if there wasn't a priority explicitly specified. 
newDataNode = newDataNode.updatePriority(currentNode.getPriority()); @@ -437,7 +519,10 @@ Repo.prototype.startTransaction = function (path: Path, const oldWriteId = transaction.currentWriteId; const serverValues = this.generateServerValues(); - const newNodeResolved = resolveDeferredValueSnapshot(newDataNode, serverValues); + const newNodeResolved = resolveDeferredValueSnapshot( + newDataNode, + serverValues + ); transaction.currentOutputSnapshotRaw = newDataNode; transaction.currentOutputSnapshotResolved = newNodeResolved; @@ -445,14 +530,22 @@ Repo.prototype.startTransaction = function (path: Path, // Mutates setsToIgnore in place setsToIgnore.splice(setsToIgnore.indexOf(oldWriteId), 1); events = events.concat( - this.serverSyncTree_.applyUserOverwrite(transaction.path, newNodeResolved, transaction.currentWriteId, - transaction.applyLocally) + this.serverSyncTree_.applyUserOverwrite( + transaction.path, + newNodeResolved, + transaction.currentWriteId, + transaction.applyLocally + ) + ); + events = events.concat( + this.serverSyncTree_.ackUserWrite(oldWriteId, true) ); - events = events.concat(this.serverSyncTree_.ackUserWrite(oldWriteId, true)); } else { abortTransaction = true; abortReason = 'nodata'; - events = events.concat(this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true)); + events = events.concat( + this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true) + ); } } } @@ -464,7 +557,7 @@ Repo.prototype.startTransaction = function (path: Path, // Removing a listener can trigger pruning which can muck with mergedData/visibleData (as it prunes data). // So defer the unwatcher until we're done. 
- (function (unwatcher) { + (function(unwatcher) { setTimeout(unwatcher, Math.floor(0)); })(queue[i].unwatcher); @@ -472,11 +565,13 @@ Repo.prototype.startTransaction = function (path: Path, if (abortReason === 'nodata') { const ref = new Reference(this, queue[i].path); // We set this field immediately, so it's safe to cast to an actual snapshot - const lastInput = /** @type {!Node} */ (queue[i].currentInputSnapshot); + const lastInput /** @type {!Node} */ = queue[i].currentInputSnapshot; const snapshot = new DataSnapshot(lastInput, ref, PRIORITY_INDEX); callbacks.push(queue[i].onComplete.bind(null, null, false, snapshot)); } else { - callbacks.push(queue[i].onComplete.bind(null, new Error(abortReason), false, null)); + callbacks.push( + queue[i].onComplete.bind(null, new Error(abortReason), false, null) + ); } } } @@ -494,7 +589,6 @@ Repo.prototype.startTransaction = function (path: Path, this.sendReadyTransactions_(); }; - /** * Returns the rootmost ancestor node of the specified path that has a pending transaction on it, or just returns * the node for the given path if there are no pending transactions on any ancestor. @@ -503,12 +597,17 @@ Repo.prototype.startTransaction = function (path: Path, * @return {!Tree.>} The rootmost node with a transaction. * @private */ -(Repo.prototype as any).getAncestorTransactionNode_ = function (path: Path): Tree { +(Repo.prototype as any).getAncestorTransactionNode_ = function( + path: Path +): Tree { let front; // Start at the root and walk deeper into the tree towards path until we find a node with pending transactions. 
let transactionNode = this.transactionQueueTree_; - while ((front = path.getFront()) !== null && transactionNode.getValue() === null) { + while ( + (front = path.getFront()) !== null && + transactionNode.getValue() === null + ) { transactionNode = transactionNode.subTree(front); path = path.popFront(); } @@ -516,7 +615,6 @@ Repo.prototype.startTransaction = function (path: Path, return transactionNode; }; - /** * Builds the queue of all transactions at or below the specified transactionNode. * @@ -524,13 +622,17 @@ Repo.prototype.startTransaction = function (path: Path, * @return {Array.} The generated queue. * @private */ -(Repo.prototype as any).buildTransactionQueue_ = function (transactionNode: Tree): Array { +(Repo.prototype as any).buildTransactionQueue_ = function( + transactionNode: Tree +): Array { // Walk any child transaction queues and aggregate them into a single queue. const transactionQueue: Transaction[] = []; this.aggregateTransactionQueuesForNode_(transactionNode, transactionQueue); // Sort them by the order the transactions were created. - transactionQueue.sort(function (a, b) { return a.order - b.order; }); + transactionQueue.sort(function(a, b) { + return a.order - b.order; + }); return transactionQueue; }; @@ -540,8 +642,10 @@ Repo.prototype.startTransaction = function (path: Path, * @param {Array.} queue * @private */ -(Repo.prototype as any).aggregateTransactionQueuesForNode_ = function (node: Tree, - queue: Array) { +(Repo.prototype as any).aggregateTransactionQueuesForNode_ = function( + node: Tree, + queue: Array +) { const nodeQueue = node.getValue(); if (nodeQueue !== null) { for (let i = 0; i < nodeQueue.length; i++) { @@ -549,19 +653,20 @@ Repo.prototype.startTransaction = function (path: Path, } } - node.forEachChild((child) => { + node.forEachChild(child => { this.aggregateTransactionQueuesForNode_(child, queue); }); }; - /** * Remove COMPLETED transactions at or below this node in the transactionQueueTree_. 
* * @param {!Tree.>} node * @private */ -(Repo.prototype as any).pruneCompletedTransactionsBelowNode_ = function (node: Tree) { +(Repo.prototype as any).pruneCompletedTransactionsBelowNode_ = function( + node: Tree +) { const queue = node.getValue(); if (queue) { let to = 0; @@ -575,12 +680,11 @@ Repo.prototype.startTransaction = function (path: Path, node.setValue(queue.length > 0 ? queue : null); } - node.forEachChild((childNode) => { + node.forEachChild(childNode => { this.pruneCompletedTransactionsBelowNode_(childNode); }); }; - /** * Aborts all transactions on ancestors or descendants of the specified path. Called when doing a set() or update() * since we consider them incompatible with transactions. @@ -589,7 +693,7 @@ Repo.prototype.startTransaction = function (path: Path, * @return {!Path} * @private */ -(Repo.prototype as any).abortTransactions_ = function (path: Path): Path { +(Repo.prototype as any).abortTransactions_ = function(path: Path): Path { const affectedPath = this.getAncestorTransactionNode_(path).path(); const transactionNode = this.transactionQueueTree_.subTree(path); @@ -607,17 +711,17 @@ Repo.prototype.startTransaction = function (path: Path, return affectedPath; }; - /** * Abort transactions stored in this transaction queue node. * * @param {!Tree.>} node Node to abort transactions for. * @private */ -(Repo.prototype as any).abortTransactionsOnNode_ = function (node: Tree) { +(Repo.prototype as any).abortTransactionsOnNode_ = function( + node: Tree +) { const queue = node.getValue(); if (queue !== null) { - // Queue up the callbacks and fire them after cleaning up all of our transaction state, since // the callback could trigger more transactions or sets. const callbacks = []; @@ -630,20 +734,29 @@ Repo.prototype.startTransaction = function (path: Path, if (queue[i].status === TransactionStatus.SENT_NEEDS_ABORT) { // Already marked. No action needed. 
} else if (queue[i].status === TransactionStatus.SENT) { - assert(lastSent === i - 1, 'All SENT items should be at beginning of queue.'); + assert( + lastSent === i - 1, + 'All SENT items should be at beginning of queue.' + ); lastSent = i; // Mark transaction for abort when it comes back. queue[i].status = TransactionStatus.SENT_NEEDS_ABORT; queue[i].abortReason = 'set'; } else { - assert(queue[i].status === TransactionStatus.RUN, - 'Unexpected transaction status in abort'); + assert( + queue[i].status === TransactionStatus.RUN, + 'Unexpected transaction status in abort' + ); // We can abort it immediately. queue[i].unwatcher(); - events = events.concat(this.serverSyncTree_.ackUserWrite(queue[i].currentWriteId, true)); + events = events.concat( + this.serverSyncTree_.ackUserWrite(queue[i].currentWriteId, true) + ); if (queue[i].onComplete) { const snapshot: DataSnapshot | null = null; - callbacks.push(queue[i].onComplete.bind(null, new Error('set'), false, snapshot)); + callbacks.push( + queue[i].onComplete.bind(null, new Error('set'), false, snapshot) + ); } } } diff --git a/src/database/core/ServerActions.ts b/src/database/core/ServerActions.ts index 58dc438616b..7e50170a0f9 100644 --- a/src/database/core/ServerActions.ts +++ b/src/database/core/ServerActions.ts @@ -23,15 +23,18 @@ import { Query } from '../api/Query'; * @interface */ export abstract class ServerActions { - /** * @param {!Query} query * @param {function():string} currentHashFn * @param {?number} tag * @param {function(string, *)} onComplete */ - abstract listen(query: Query, currentHashFn: () => string, tag: number | null, - onComplete: (a: string, b: any) => void): void; + abstract listen( + query: Query, + currentHashFn: () => string, + tag: number | null, + onComplete: (a: string, b: any) => void + ): void; /** * Remove a listen. 
@@ -47,7 +50,12 @@ export abstract class ServerActions { * @param {function(string, string)=} onComplete * @param {string=} hash */ - put(pathString: string, data: any, onComplete?: (a: string, b: string) => void, hash?: string) { } + put( + pathString: string, + data: any, + onComplete?: (a: string, b: string) => void, + hash?: string + ) {} /** * @param {string} pathString @@ -55,37 +63,52 @@ export abstract class ServerActions { * @param {function(string, ?string)} onComplete * @param {string=} hash */ - merge(pathString: string, data: any, onComplete: (a: string, b: string | null) => void, hash?: string) { } + merge( + pathString: string, + data: any, + onComplete: (a: string, b: string | null) => void, + hash?: string + ) {} /** * Refreshes the auth token for the current connection. * @param {string} token The authentication token */ - refreshAuthToken(token: string) { } + refreshAuthToken(token: string) {} /** * @param {string} pathString * @param {*} data * @param {function(string, string)=} onComplete */ - onDisconnectPut(pathString: string, data: any, onComplete?: (a: string, b: string) => void) { } + onDisconnectPut( + pathString: string, + data: any, + onComplete?: (a: string, b: string) => void + ) {} /** * @param {string} pathString * @param {*} data * @param {function(string, string)=} onComplete */ - onDisconnectMerge(pathString: string, data: any, onComplete?: (a: string, b: string) => void) { } + onDisconnectMerge( + pathString: string, + data: any, + onComplete?: (a: string, b: string) => void + ) {} /** * @param {string} pathString * @param {function(string, string)=} onComplete */ - onDisconnectCancel(pathString: string, onComplete?: (a: string, b: string) => void) { } + onDisconnectCancel( + pathString: string, + onComplete?: (a: string, b: string) => void + ) {} /** * @param {Object.} stats */ - reportStats(stats: { [k: string]: any }) { } - + reportStats(stats: { [k: string]: any }) {} } diff --git a/src/database/core/SnapshotHolder.ts 
b/src/database/core/SnapshotHolder.ts index 58d94f2e2b5..e3b2c1e745f 100644 --- a/src/database/core/SnapshotHolder.ts +++ b/src/database/core/SnapshotHolder.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { ChildrenNode } from "./snap/ChildrenNode"; +import { ChildrenNode } from './snap/ChildrenNode'; import { Path } from './util/Path'; import { Node } from './snap/Node'; diff --git a/src/database/core/SparseSnapshotTree.ts b/src/database/core/SparseSnapshotTree.ts index 83b383339bd..c7f7c0f8d56 100644 --- a/src/database/core/SparseSnapshotTree.ts +++ b/src/database/core/SparseSnapshotTree.ts @@ -14,9 +14,9 @@ * limitations under the License. */ -import { Path } from "./util/Path"; -import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; -import { CountedSet } from "./util/CountedSet"; +import { Path } from './util/Path'; +import { PRIORITY_INDEX } from './snap/indexes/PriorityIndex'; +import { CountedSet } from './util/CountedSet'; import { Node } from './snap/Node'; /** @@ -120,7 +120,9 @@ export class SparseSnapshotTree { const childKey = path.getFront(); path = path.popFront(); if (this.children_.contains(childKey)) { - const safeToRemove = (this.children_.get(childKey) as SparseSnapshotTree).forget(path); + const safeToRemove = (this.children_.get( + childKey + ) as SparseSnapshotTree).forget(path); if (safeToRemove) { this.children_.remove(childKey); } @@ -132,7 +134,6 @@ export class SparseSnapshotTree { } else { return false; } - } else { return true; } @@ -146,7 +147,7 @@ export class SparseSnapshotTree { * @param {!Path} prefixPath Path to look up node for. * @param {!Function} func The function to invoke for each tree. 
*/ - forEachTree(prefixPath: Path, func: (a: Path, b: Node) => any) { + forEachTree(prefixPath: Path, func: (a: Path, b: Node) => any) { if (this.value_ !== null) { func(prefixPath, this.value_); } else { diff --git a/src/database/core/SyncPoint.ts b/src/database/core/SyncPoint.ts index 1798b0bd82a..d744fa57bb1 100644 --- a/src/database/core/SyncPoint.ts +++ b/src/database/core/SyncPoint.ts @@ -43,7 +43,10 @@ let __referenceConstructor: ReferenceConstructor; */ export class SyncPoint { static set __referenceConstructor(val: ReferenceConstructor) { - assert(!__referenceConstructor, '__referenceConstructor has already been defined'); + assert( + !__referenceConstructor, + '__referenceConstructor has already been defined' + ); __referenceConstructor = val; } @@ -77,18 +80,27 @@ export class SyncPoint { * @param {?Node} optCompleteServerCache * @return {!Array.} */ - applyOperation(operation: Operation, writesCache: WriteTreeRef, - optCompleteServerCache: Node | null): Event[] { + applyOperation( + operation: Operation, + writesCache: WriteTreeRef, + optCompleteServerCache: Node | null + ): Event[] { const queryId = operation.source.queryId; if (queryId !== null) { const view = safeGet(this.views_, queryId); assert(view != null, 'SyncTree gave us an op for an invalid query.'); - return view.applyOperation(operation, writesCache, optCompleteServerCache); + return view.applyOperation( + operation, + writesCache, + optCompleteServerCache + ); } else { let events: Event[] = []; - forEach(this.views_, function (key: string, view: View) { - events = events.concat(view.applyOperation(operation, writesCache, optCompleteServerCache)); + forEach(this.views_, function(key: string, view: View) { + events = events.concat( + view.applyOperation(operation, writesCache, optCompleteServerCache) + ); }); return events; @@ -105,13 +117,20 @@ export class SyncPoint { * @param {boolean} serverCacheComplete * @return {!Array.} Events to raise. 
*/ - addEventRegistration(query: Query, eventRegistration: EventRegistration, writesCache: WriteTreeRef, - serverCache: Node | null, serverCacheComplete: boolean): Event[] { + addEventRegistration( + query: Query, + eventRegistration: EventRegistration, + writesCache: WriteTreeRef, + serverCache: Node | null, + serverCacheComplete: boolean + ): Event[] { const queryId = query.queryIdentifier(); let view = safeGet(this.views_, queryId); if (!view) { // TODO: make writesCache take flag for complete server node - let eventCache = writesCache.calcCompleteEventCache(serverCacheComplete ? serverCache : null); + let eventCache = writesCache.calcCompleteEventCache( + serverCacheComplete ? serverCache : null + ); let eventCacheComplete = false; if (eventCache) { eventCacheComplete = true; @@ -123,8 +142,16 @@ export class SyncPoint { eventCacheComplete = false; } const viewCache = new ViewCache( - new CacheNode(/** @type {!Node} */ (eventCache), eventCacheComplete, false), - new CacheNode(/** @type {!Node} */ (serverCache), serverCacheComplete, false) + new CacheNode /** @type {!Node} */( + eventCache, + eventCacheComplete, + false + ), + new CacheNode /** @type {!Node} */( + serverCache, + serverCacheComplete, + false + ) ); view = new View(query, viewCache); this.views_[queryId] = view; @@ -146,8 +173,11 @@ export class SyncPoint { * @param {Error=} cancelError If a cancelError is provided, appropriate cancel events will be returned. 
* @return {{removed:!Array., events:!Array.}} removed queries and any cancel events */ - removeEventRegistration(query: Query, eventRegistration: EventRegistration | null, - cancelError?: Error): { removed: Query[], events: Event[] } { + removeEventRegistration( + query: Query, + eventRegistration: EventRegistration | null, + cancelError?: Error + ): { removed: Query[]; events: Event[] } { const queryId = query.queryIdentifier(); const removed: Query[] = []; let cancelEvents: Event[] = []; @@ -155,8 +185,10 @@ export class SyncPoint { if (queryId === 'default') { // When you do ref.off(...), we search all views for the registration to remove. const self = this; - forEach(this.views_, function (viewQueryId: string, view: View) { - cancelEvents = cancelEvents.concat(view.removeEventRegistration(eventRegistration, cancelError)); + forEach(this.views_, function(viewQueryId: string, view: View) { + cancelEvents = cancelEvents.concat( + view.removeEventRegistration(eventRegistration, cancelError) + ); if (view.isEmpty()) { delete self.views_[viewQueryId]; @@ -170,7 +202,9 @@ export class SyncPoint { // remove the callback from the specific view. const view = safeGet(this.views_, queryId); if (view) { - cancelEvents = cancelEvents.concat(view.removeEventRegistration(eventRegistration, cancelError)); + cancelEvents = cancelEvents.concat( + view.removeEventRegistration(eventRegistration, cancelError) + ); if (view.isEmpty()) { delete this.views_[queryId]; @@ -184,19 +218,20 @@ export class SyncPoint { if (hadCompleteView && !this.hasCompleteView()) { // We removed our last complete view. 
- removed.push(new SyncPoint.__referenceConstructor(query.repo, query.path)); + removed.push( + new SyncPoint.__referenceConstructor(query.repo, query.path) + ); } - return {removed: removed, events: cancelEvents}; + return { removed: removed, events: cancelEvents }; } /** * @return {!Array.} */ getQueryViews(): View[] { - const values = Object.keys(this.views_) - .map(key => this.views_[key]); - return values.filter(function (view) { + const values = Object.keys(this.views_).map(key => this.views_[key]); + return values.filter(function(view) { return !view.getQuery().getQueryParams().loadsAllData(); }); } @@ -247,7 +282,9 @@ export class SyncPoint { * @return {?View} */ getCompleteView(): View | null { - const completeView = findValue(this.views_, (view: View) => view.getQuery().getQueryParams().loadsAllData()); + const completeView = findValue(this.views_, (view: View) => + view.getQuery().getQueryParams().loadsAllData() + ); return completeView || null; } } diff --git a/src/database/core/SyncTree.ts b/src/database/core/SyncTree.ts index d933ba431f7..4f956ef417d 100644 --- a/src/database/core/SyncTree.ts +++ b/src/database/core/SyncTree.ts @@ -46,10 +46,12 @@ import { View } from './view/View'; * }} */ export interface ListenProvider { - startListening(query: Query, - tag: number | null, - hashFn: () => string, - onComplete: (a: string, b?: any) => Event[]): Event[]; + startListening( + query: Query, + tag: number | null, + hashFn: () => string, + onComplete: (a: string, b?: any) => Event[] + ): Event[]; stopListening(a: Query, b: number | null): void; } @@ -98,8 +100,7 @@ export class SyncTree { * @param {!ListenProvider} listenProvider_ Used by SyncTree to start / stop listening * to server data. */ - constructor(private listenProvider_: ListenProvider) { - } + constructor(private listenProvider_: ListenProvider) {} /** * Apply the data changes for a user-generated set() or transaction() call. 
@@ -110,7 +111,12 @@ export class SyncTree { * @param {boolean=} visible * @return {!Array.} Events to raise. */ - applyUserOverwrite(path: Path, newData: Node, writeId: number, visible?: boolean): Event[] { + applyUserOverwrite( + path: Path, + newData: Node, + writeId: number, + visible?: boolean + ): Event[] { // Record pending write. this.pendingWriteTree_.addOverwrite(path, newData, writeId, visible); @@ -118,7 +124,8 @@ export class SyncTree { return []; } else { return this.applyOperationToSyncPoints_( - new Overwrite(OperationSource.User, path, newData)); + new Overwrite(OperationSource.User, path, newData) + ); } } @@ -130,14 +137,19 @@ export class SyncTree { * @param {!number} writeId * @return {!Array.} Events to raise. */ - applyUserMerge(path: Path, changedChildren: { [k: string]: Node }, writeId: number): Event[] { + applyUserMerge( + path: Path, + changedChildren: { [k: string]: Node }, + writeId: number + ): Event[] { // Record pending merge. this.pendingWriteTree_.addMerge(path, changedChildren, writeId); const changeTree = ImmutableTree.fromObject(changedChildren); return this.applyOperationToSyncPoints_( - new Merge(OperationSource.User, path, changeTree)); + new Merge(OperationSource.User, path, changeTree) + ); } /** @@ -154,14 +166,17 @@ export class SyncTree { return []; } else { let affectedTree = ImmutableTree.Empty; - if (write.snap != null) { // overwrite + if (write.snap != null) { + // overwrite affectedTree = affectedTree.set(Path.Empty, true); } else { - forEach(write.children, function (pathString: string, node: Node) { + forEach(write.children, function(pathString: string, node: Node) { affectedTree = affectedTree.set(new Path(pathString), node); }); } - return this.applyOperationToSyncPoints_(new AckUserWrite(write.path, affectedTree, revert)); + return this.applyOperationToSyncPoints_( + new AckUserWrite(write.path, affectedTree, revert) + ); } } @@ -174,7 +189,8 @@ export class SyncTree { */ applyServerOverwrite(path: Path, 
newData: Node): Event[] { return this.applyOperationToSyncPoints_( - new Overwrite(OperationSource.Server, path, newData)); + new Overwrite(OperationSource.Server, path, newData) + ); } /** @@ -184,11 +200,15 @@ export class SyncTree { * @param {!Object.} changedChildren * @return {!Array.} Events to raise. */ - applyServerMerge(path: Path, changedChildren: { [k: string]: Node }): Event[] { + applyServerMerge( + path: Path, + changedChildren: { [k: string]: Node } + ): Event[] { const changeTree = ImmutableTree.fromObject(changedChildren); return this.applyOperationToSyncPoints_( - new Merge(OperationSource.Server, path, changeTree)); + new Merge(OperationSource.Server, path, changeTree) + ); } /** @@ -199,7 +219,8 @@ export class SyncTree { */ applyListenComplete(path: Path): Event[] { return this.applyOperationToSyncPoints_( - new ListenComplete(OperationSource.Server, path)); + new ListenComplete(OperationSource.Server, path) + ); } /** @@ -214,10 +235,14 @@ export class SyncTree { const queryKey = this.queryKeyForTag_(tag); if (queryKey != null) { const r = SyncTree.parseQueryKey_(queryKey); - const queryPath = r.path, queryId = r.queryId; + const queryPath = r.path, + queryId = r.queryId; const relativePath = Path.relativePath(queryPath, path); - const op = new Overwrite(OperationSource.forServerTaggedQuery(queryId), - relativePath, snap); + const op = new Overwrite( + OperationSource.forServerTaggedQuery(queryId), + relativePath, + snap + ); return this.applyTaggedOperation_(queryPath, op); } else { // Query must have been removed already @@ -233,15 +258,23 @@ export class SyncTree { * @param {!number} tag * @return {!Array.} Events to raise. 
*/ - applyTaggedQueryMerge(path: Path, changedChildren: { [k: string]: Node }, tag: number): Event[] { + applyTaggedQueryMerge( + path: Path, + changedChildren: { [k: string]: Node }, + tag: number + ): Event[] { const queryKey = this.queryKeyForTag_(tag); if (queryKey) { const r = SyncTree.parseQueryKey_(queryKey); - const queryPath = r.path, queryId = r.queryId; + const queryPath = r.path, + queryId = r.queryId; const relativePath = Path.relativePath(queryPath, path); const changeTree = ImmutableTree.fromObject(changedChildren); - const op = new Merge(OperationSource.forServerTaggedQuery(queryId), - relativePath, changeTree); + const op = new Merge( + OperationSource.forServerTaggedQuery(queryId), + relativePath, + changeTree + ); return this.applyTaggedOperation_(queryPath, op); } else { // We've already removed the query. No big deal, ignore the update @@ -260,10 +293,13 @@ export class SyncTree { const queryKey = this.queryKeyForTag_(tag); if (queryKey) { const r = SyncTree.parseQueryKey_(queryKey); - const queryPath = r.path, queryId = r.queryId; + const queryPath = r.path, + queryId = r.queryId; const relativePath = Path.relativePath(queryPath, path); - const op = new ListenComplete(OperationSource.forServerTaggedQuery(queryId), - relativePath); + const op = new ListenComplete( + OperationSource.forServerTaggedQuery(queryId), + relativePath + ); return this.applyTaggedOperation_(queryPath, op); } else { // We've already removed the query. No big deal, ignore the update @@ -278,24 +314,29 @@ export class SyncTree { * @param {!EventRegistration} eventRegistration * @return {!Array.} Events to raise. 
*/ - addEventRegistration(query: Query, eventRegistration: EventRegistration): Event[] { + addEventRegistration( + query: Query, + eventRegistration: EventRegistration + ): Event[] { const path = query.path; let serverCache: Node | null = null; let foundAncestorDefaultView = false; // Any covering writes will necessarily be at the root, so really all we need to find is the server cache. // Consider optimizing this once there's a better understanding of what actual behavior will be. - this.syncPointTree_.foreachOnPath(path, function (pathToSyncPoint, sp) { + this.syncPointTree_.foreachOnPath(path, function(pathToSyncPoint, sp) { const relativePath = Path.relativePath(pathToSyncPoint, path); serverCache = serverCache || sp.getCompleteServerCache(relativePath); - foundAncestorDefaultView = foundAncestorDefaultView || sp.hasCompleteView(); + foundAncestorDefaultView = + foundAncestorDefaultView || sp.hasCompleteView(); }); let syncPoint = this.syncPointTree_.get(path); if (!syncPoint) { syncPoint = new SyncPoint(); this.syncPointTree_ = this.syncPointTree_.set(path, syncPoint); } else { - foundAncestorDefaultView = foundAncestorDefaultView || syncPoint.hasCompleteView(); + foundAncestorDefaultView = + foundAncestorDefaultView || syncPoint.hasCompleteView(); serverCache = serverCache || syncPoint.getCompleteServerCache(Path.Empty); } @@ -306,10 +347,13 @@ export class SyncTree { serverCacheComplete = false; serverCache = ChildrenNode.EMPTY_NODE; const subtree = this.syncPointTree_.subtree(path); - subtree.foreachChild(function (childName, childSyncPoint) { + subtree.foreachChild(function(childName, childSyncPoint) { const completeCache = childSyncPoint.getCompleteServerCache(Path.Empty); if (completeCache) { - serverCache = serverCache.updateImmediateChild(childName, completeCache); + serverCache = serverCache.updateImmediateChild( + childName, + completeCache + ); } }); } @@ -318,17 +362,25 @@ export class SyncTree { if (!viewAlreadyExists && 
!query.getQueryParams().loadsAllData()) { // We need to track a tag for this query const queryKey = SyncTree.makeQueryKey_(query); - assert(!(queryKey in this.queryToTagMap_), - 'View does not exist, but we have a tag'); + assert( + !(queryKey in this.queryToTagMap_), + 'View does not exist, but we have a tag' + ); const tag = SyncTree.getNextQueryTag_(); this.queryToTagMap_[queryKey] = tag; // Coerce to string to avoid sparse arrays. this.tagToQueryMap_['_' + tag] = queryKey; } const writesCache = this.pendingWriteTree_.childWrites(path); - let events = syncPoint.addEventRegistration(query, eventRegistration, writesCache, serverCache, serverCacheComplete); + let events = syncPoint.addEventRegistration( + query, + eventRegistration, + writesCache, + serverCache, + serverCacheComplete + ); if (!viewAlreadyExists && !foundAncestorDefaultView) { - const view = /** @type !View */ (syncPoint.viewForQuery(query)); + const view /** @type !View */ = syncPoint.viewForQuery(query); events = events.concat(this.setupListener_(query, view)); } return events; @@ -345,8 +397,11 @@ export class SyncTree { * @param {Error=} cancelError If a cancelError is provided, appropriate cancel events will be returned. * @return {!Array.} Cancel events, if cancelError was provided. */ - removeEventRegistration(query: Query, eventRegistration: EventRegistration | null, - cancelError?: Error): Event[] { + removeEventRegistration( + query: Query, + eventRegistration: EventRegistration | null, + cancelError?: Error + ): Event[] { // Find the syncPoint first. Then deal with whether or not it has matching listeners const path = query.path; const maybeSyncPoint = this.syncPointTree_.get(path); @@ -354,11 +409,19 @@ export class SyncTree { // A removal on a default query affects all queries at that location. A removal on an indexed query, even one without // other query constraints, does *not* affect all queries at that location. So this check must be for 'default', and // not loadsAllData(). 
- if (maybeSyncPoint && (query.queryIdentifier() === 'default' || maybeSyncPoint.viewExistsForQuery(query))) { + if ( + maybeSyncPoint && + (query.queryIdentifier() === 'default' || + maybeSyncPoint.viewExistsForQuery(query)) + ) { /** * @type {{removed: !Array., events: !Array.}} */ - const removedAndEvents = maybeSyncPoint.removeEventRegistration(query, eventRegistration, cancelError); + const removedAndEvents = maybeSyncPoint.removeEventRegistration( + query, + eventRegistration, + cancelError + ); if (maybeSyncPoint.isEmpty()) { this.syncPointTree_ = this.syncPointTree_.remove(path); } @@ -370,10 +433,15 @@ export class SyncTree { // // Since indexed queries can shadow if they don't have other query constraints, check for loadsAllData(), instead of // queryId === 'default' - const removingDefault = -1 !== removed.findIndex(function (query) { - return query.getQueryParams().loadsAllData(); - }); - const covered = this.syncPointTree_.findOnPath(path, function (relativePath, parentSyncPoint) { + const removingDefault = + -1 !== + removed.findIndex(function(query) { + return query.getQueryParams().loadsAllData(); + }); + const covered = this.syncPointTree_.findOnPath(path, function( + relativePath, + parentSyncPoint + ) { return parentSyncPoint.hasCompleteView(); }); @@ -387,10 +455,15 @@ export class SyncTree { // Ok, we've collected all the listens we need. Set them up. 
for (let i = 0; i < newViews.length; ++i) { - const view = newViews[i], newQuery = view.getQuery(); + const view = newViews[i], + newQuery = view.getQuery(); const listener = this.createListenerForView_(view); - this.listenProvider_.startListening(SyncTree.queryForListening_(newQuery), this.tagForQuery_(newQuery), - listener.hashFn, listener.onComplete); + this.listenProvider_.startListening( + SyncTree.queryForListening_(newQuery), + this.tagForQuery_(newQuery), + listener.hashFn, + listener.onComplete + ); } } else { // There's nothing below us, so nothing we need to start listening on @@ -405,11 +478,19 @@ export class SyncTree { if (removingDefault) { // We don't tag default listeners const defaultTag: number | null = null; - this.listenProvider_.stopListening(SyncTree.queryForListening_(query), defaultTag); + this.listenProvider_.stopListening( + SyncTree.queryForListening_(query), + defaultTag + ); } else { removed.forEach((queryToRemove: Query) => { - const tagToRemove = this.queryToTagMap_[SyncTree.makeQueryKey_(queryToRemove)]; - this.listenProvider_.stopListening(SyncTree.queryForListening_(queryToRemove), tagToRemove); + const tagToRemove = this.queryToTagMap_[ + SyncTree.makeQueryKey_(queryToRemove) + ]; + this.listenProvider_.stopListening( + SyncTree.queryForListening_(queryToRemove), + tagToRemove + ); }); } } @@ -430,17 +511,28 @@ export class SyncTree { * @param {Array.=} writeIdsToExclude A specific set to be excluded * @return {?Node} */ - calcCompleteEventCache(path: Path, writeIdsToExclude?: number[]): Node | null { + calcCompleteEventCache( + path: Path, + writeIdsToExclude?: number[] + ): Node | null { const includeHiddenSets = true; const writeTree = this.pendingWriteTree_; - const serverCache = this.syncPointTree_.findOnPath(path, function (pathSoFar, syncPoint) { + const serverCache = this.syncPointTree_.findOnPath(path, function( + pathSoFar, + syncPoint + ) { const relativePath = Path.relativePath(pathSoFar, path); const serverCache = 
syncPoint.getCompleteServerCache(relativePath); if (serverCache) { return serverCache; } }); - return writeTree.calcCompleteEventCache(path, serverCache, writeIdsToExclude, includeHiddenSets); + return writeTree.calcCompleteEventCache( + path, + serverCache, + writeIdsToExclude, + includeHiddenSets + ); } /** @@ -451,8 +543,12 @@ export class SyncTree { * @return {!Array.} * @private */ - private collectDistinctViewsForSubTree_(subtree: ImmutableTree): View[] { - return subtree.fold((relativePath, maybeChildSyncPoint, childMap) => { + private collectDistinctViewsForSubTree_( + subtree: ImmutableTree + ): View[] { + return subtree.fold< + View[] + >((relativePath, maybeChildSyncPoint, childMap) => { if (maybeChildSyncPoint && maybeChildSyncPoint.hasCompleteView()) { const completeView = maybeChildSyncPoint.getCompleteView(); return [completeView]; @@ -462,7 +558,7 @@ export class SyncTree { if (maybeChildSyncPoint) { views = maybeChildSyncPoint.getQueryViews(); } - forEach(childMap, function (key: string, childViews: View[]) { + forEach(childMap, function(key: string, childViews: View[]) { views = views.concat(childViews); }); return views; @@ -487,7 +583,6 @@ export class SyncTree { } } - /** * Normalizes a query to a query we send the server for listening * @param {!Query} query @@ -495,17 +590,19 @@ export class SyncTree { * @private */ private static queryForListening_(query: Query): Query { - if (query.getQueryParams().loadsAllData() && !query.getQueryParams().isDefault()) { + if ( + query.getQueryParams().loadsAllData() && + !query.getQueryParams().isDefault() + ) { // We treat queries that load all data as default queries // Cast is necessary because ref() technically returns Firebase which is actually fb.api.Firebase which inherits // from Query - return /** @type {!Query} */(query.getRef()); + return /** @type {!Query} */ query.getRef(); } else { return query; } } - /** * For a given new listen, manage the de-duplication of outstanding subscriptions. 
* @@ -519,28 +616,43 @@ export class SyncTree { const tag = this.tagForQuery_(query); const listener = this.createListenerForView_(view); - const events = this.listenProvider_.startListening(SyncTree.queryForListening_(query), tag, listener.hashFn, - listener.onComplete); + const events = this.listenProvider_.startListening( + SyncTree.queryForListening_(query), + tag, + listener.hashFn, + listener.onComplete + ); const subtree = this.syncPointTree_.subtree(path); // The root of this subtree has our query. We're here because we definitely need to send a listen for that, but we // may need to shadow other listens as well. if (tag) { - assert(!subtree.value.hasCompleteView(), 'If we\'re adding a query, it shouldn\'t be shadowed'); + assert( + !subtree.value.hasCompleteView(), + "If we're adding a query, it shouldn't be shadowed" + ); } else { // Shadow everything at or below this location, this is a default listener. - const queriesToStop = subtree.fold(function (relativePath, maybeChildSyncPoint, childMap) { - if (!relativePath.isEmpty() && maybeChildSyncPoint && maybeChildSyncPoint.hasCompleteView()) { + const queriesToStop = subtree.fold(function( + relativePath, + maybeChildSyncPoint, + childMap + ) { + if ( + !relativePath.isEmpty() && + maybeChildSyncPoint && + maybeChildSyncPoint.hasCompleteView() + ) { return [maybeChildSyncPoint.getCompleteView().getQuery()]; } else { // No default listener here, flatten any deeper queries into an array let queries: Query[] = []; if (maybeChildSyncPoint) { queries = queries.concat( - maybeChildSyncPoint.getQueryViews().map(view=> view.getQuery()) + maybeChildSyncPoint.getQueryViews().map(view => view.getQuery()) ); } - forEach(childMap, function (key: string, childQueries: Query[]) { + forEach(childMap, function(key: string, childQueries: Query[]) { queries = queries.concat(childQueries); }); return queries; @@ -548,7 +660,10 @@ export class SyncTree { }); for (let i = 0; i < queriesToStop.length; ++i) { const queryToStop = 
queriesToStop[i]; - this.listenProvider_.stopListening(SyncTree.queryForListening_(queryToStop), this.tagForQuery_(queryToStop)); + this.listenProvider_.stopListening( + SyncTree.queryForListening_(queryToStop), + this.tagForQuery_(queryToStop) + ); } } return events; @@ -560,7 +675,9 @@ export class SyncTree { * @return {{hashFn: function(), onComplete: function(!string, *)}} * @private */ - private createListenerForView_(view: View): { hashFn(): string, onComplete(a: string, b?: any): Event[] } { + private createListenerForView_( + view: View + ): { hashFn(): string; onComplete(a: string, b?: any): Event[] } { const query = view.getQuery(); const tag = this.tagForQuery_(query); @@ -580,7 +697,11 @@ export class SyncTree { // If a listen failed, kill all of the listeners here, not just the one that triggered the error. // Note that this may need to be scoped to just this listener if we change permissions on filtered children const error = errorForServerCode(status, query); - return this.removeEventRegistration(query, /*eventRegistration*/null, error); + return this.removeEventRegistration( + query, + /*eventRegistration*/ null, + error + ); } } }; @@ -602,9 +723,14 @@ export class SyncTree { * @param {!string} queryKey * @return {{queryId: !string, path: !Path}} */ - private static parseQueryKey_(queryKey: string): { queryId: string, path: Path } { + private static parseQueryKey_( + queryKey: string + ): { queryId: string; path: Path } { const splitIndex = queryKey.indexOf('$'); - assert(splitIndex !== -1 && splitIndex < queryKey.length - 1, 'Bad queryKey.'); + assert( + splitIndex !== -1 && splitIndex < queryKey.length - 1, + 'Bad queryKey.' 
+ ); return { queryId: queryKey.substr(splitIndex + 1), path: new Path(queryKey.substr(0, splitIndex)) @@ -656,11 +782,18 @@ export class SyncTree { * @return {!Array.} * @private */ - private applyTaggedOperation_(queryPath: Path, operation: Operation): Event[] { + private applyTaggedOperation_( + queryPath: Path, + operation: Operation + ): Event[] { const syncPoint = this.syncPointTree_.get(queryPath); - assert(syncPoint, 'Missing sync point for query tag that we\'re tracking'); + assert(syncPoint, "Missing sync point for query tag that we're tracking"); const writesCache = this.pendingWriteTree_.childWrites(queryPath); - return syncPoint.applyOperation(operation, writesCache, /*serverCache=*/null); + return syncPoint.applyOperation( + operation, + writesCache, + /*serverCache=*/ null + ); } /** @@ -681,9 +814,12 @@ export class SyncTree { * @private */ private applyOperationToSyncPoints_(operation: Operation): Event[] { - return this.applyOperationHelper_(operation, this.syncPointTree_, /*serverCache=*/ null, - this.pendingWriteTree_.childWrites(Path.Empty)); - + return this.applyOperationHelper_( + operation, + this.syncPointTree_, + /*serverCache=*/ null, + this.pendingWriteTree_.childWrites(Path.Empty) + ); } /** @@ -696,11 +832,19 @@ export class SyncTree { * @param {!WriteTreeRef} writesCache * @return {!Array.} */ - private applyOperationHelper_(operation: Operation, syncPointTree: ImmutableTree, - serverCache: Node | null, writesCache: WriteTreeRef): Event[] { - + private applyOperationHelper_( + operation: Operation, + syncPointTree: ImmutableTree, + serverCache: Node | null, + writesCache: WriteTreeRef + ): Event[] { if (operation.path.isEmpty()) { - return this.applyOperationDescendantsHelper_(operation, syncPointTree, serverCache, writesCache); + return this.applyOperationDescendantsHelper_( + operation, + syncPointTree, + serverCache, + writesCache + ); } else { const syncPoint = syncPointTree.get(Path.Empty); @@ -714,14 +858,24 @@ export class 
SyncTree { const childOperation = operation.operationForChild(childName); const childTree = syncPointTree.children.get(childName); if (childTree && childOperation) { - const childServerCache = serverCache ? serverCache.getImmediateChild(childName) : null; + const childServerCache = serverCache + ? serverCache.getImmediateChild(childName) + : null; const childWritesCache = writesCache.child(childName); events = events.concat( - this.applyOperationHelper_(childOperation, childTree, childServerCache, childWritesCache)); + this.applyOperationHelper_( + childOperation, + childTree, + childServerCache, + childWritesCache + ) + ); } if (syncPoint) { - events = events.concat(syncPoint.applyOperation(operation, writesCache, serverCache)); + events = events.concat( + syncPoint.applyOperation(operation, writesCache, serverCache) + ); } return events; @@ -738,8 +892,12 @@ export class SyncTree { * @param {!WriteTreeRef} writesCache * @return {!Array.} */ - private applyOperationDescendantsHelper_(operation: Operation, syncPointTree: ImmutableTree, - serverCache: Node | null, writesCache: WriteTreeRef): Event[] { + private applyOperationDescendantsHelper_( + operation: Operation, + syncPointTree: ImmutableTree, + serverCache: Node | null, + writesCache: WriteTreeRef + ): Event[] { const syncPoint = syncPointTree.get(Path.Empty); // If we don't have cached server data, see if we can get it from this SyncPoint. @@ -749,17 +907,27 @@ export class SyncTree { let events: Event[] = []; syncPointTree.children.inorderTraversal((childName, childTree) => { - const childServerCache = serverCache ? serverCache.getImmediateChild(childName) : null; + const childServerCache = serverCache + ? 
serverCache.getImmediateChild(childName) + : null; const childWritesCache = writesCache.child(childName); const childOperation = operation.operationForChild(childName); if (childOperation) { events = events.concat( - this.applyOperationDescendantsHelper_(childOperation, childTree, childServerCache, childWritesCache)); + this.applyOperationDescendantsHelper_( + childOperation, + childTree, + childServerCache, + childWritesCache + ) + ); } }); if (syncPoint) { - events = events.concat(syncPoint.applyOperation(operation, writesCache, serverCache)); + events = events.concat( + syncPoint.applyOperation(operation, writesCache, serverCache) + ); } return events; diff --git a/src/database/core/WriteTree.ts b/src/database/core/WriteTree.ts index 8462a8a6ebf..2b42fe7c58a 100644 --- a/src/database/core/WriteTree.ts +++ b/src/database/core/WriteTree.ts @@ -33,7 +33,7 @@ export interface WriteRecord { path: Path; snap?: Node | null; children?: { [k: string]: Node } | null; - visible: boolean + visible: boolean; } /** @@ -84,11 +84,19 @@ export class WriteTree { * @param {boolean=} visible This is set to false by some transactions. 
It should be excluded from event caches */ addOverwrite(path: Path, snap: Node, writeId: number, visible?: boolean) { - assert(writeId > this.lastWriteId_, 'Stacking an older write on top of newer ones'); + assert( + writeId > this.lastWriteId_, + 'Stacking an older write on top of newer ones' + ); if (visible === undefined) { visible = true; } - this.allWrites_.push({path: path, snap: snap, writeId: writeId, visible: visible}); + this.allWrites_.push({ + path: path, + snap: snap, + writeId: writeId, + visible: visible + }); if (visible) { this.visibleWrites_ = this.visibleWrites_.addWrite(path, snap); @@ -103,15 +111,26 @@ export class WriteTree { * @param {!Object.} changedChildren * @param {!number} writeId */ - addMerge(path: Path, changedChildren: { [k: string]: Node }, writeId: number) { - assert(writeId > this.lastWriteId_, 'Stacking an older merge on top of newer ones'); - this.allWrites_.push({path: path, children: changedChildren, writeId: writeId, visible: true}); + addMerge( + path: Path, + changedChildren: { [k: string]: Node }, + writeId: number + ) { + assert( + writeId > this.lastWriteId_, + 'Stacking an older merge on top of newer ones' + ); + this.allWrites_.push({ + path: path, + children: changedChildren, + writeId: writeId, + visible: true + }); this.visibleWrites_ = this.visibleWrites_.addWrites(path, changedChildren); this.lastWriteId_ = writeId; } - /** * @param {!number} writeId * @return {?WriteRecord} @@ -126,7 +145,6 @@ export class WriteTree { return null; } - /** * Remove a write (either an overwrite or merge) that has been successfully acknowledge by the server. Recalculates * the tree if necessary. We return true if it may have been visible, meaning views need to reevaluate. 
@@ -141,7 +159,9 @@ export class WriteTree { //const validClear = revert || this.allWrites_.length === 0 || writeId <= this.allWrites_[0].writeId; //assert(validClear, "Either we don't have this write, or it's the first one in the queue"); - const idx = this.allWrites_.findIndex(function (s) { return s.writeId === writeId; }); + const idx = this.allWrites_.findIndex(function(s) { + return s.writeId === writeId; + }); assert(idx >= 0, 'removeWrite called with nonexistent writeId.'); const writeToRemove = this.allWrites_[idx]; this.allWrites_.splice(idx, 1); @@ -154,7 +174,10 @@ export class WriteTree { while (removedWriteWasVisible && i >= 0) { const currentWrite = this.allWrites_[i]; if (currentWrite.visible) { - if (i >= idx && this.recordContainsPath_(currentWrite, writeToRemove.path)) { + if ( + i >= idx && + this.recordContainsPath_(currentWrite, writeToRemove.path) + ) { // The removed write was completely shadowed by a subsequent write. removedWriteWasVisible = false; } else if (writeToRemove.path.contains(currentWrite.path)) { @@ -174,11 +197,15 @@ export class WriteTree { } else { // There's no shadowing. We can safely just remove the write(s) from visibleWrites. 
if (writeToRemove.snap) { - this.visibleWrites_ = this.visibleWrites_.removeWrite(writeToRemove.path); + this.visibleWrites_ = this.visibleWrites_.removeWrite( + writeToRemove.path + ); } else { const children = writeToRemove.children; forEach(children, (childName: string) => { - this.visibleWrites_ = this.visibleWrites_.removeWrite(writeToRemove.path.child(childName)); + this.visibleWrites_ = this.visibleWrites_.removeWrite( + writeToRemove.path.child(childName) + ); }); } return true; @@ -206,8 +233,12 @@ export class WriteTree { * @param {boolean=} includeHiddenWrites Defaults to false, whether or not to layer on writes with visible set to false * @return {?Node} */ - calcCompleteEventCache(treePath: Path, completeServerCache: Node | null, writeIdsToExclude?: number[], - includeHiddenWrites?: boolean): Node | null { + calcCompleteEventCache( + treePath: Path, + completeServerCache: Node | null, + writeIdsToExclude?: number[], + includeHiddenWrites?: boolean + ): Node | null { if (!writeIdsToExclude && !includeHiddenWrites) { const shadowingNode = this.visibleWrites_.getCompleteNode(treePath); if (shadowingNode != null) { @@ -216,7 +247,10 @@ export class WriteTree { const subMerge = this.visibleWrites_.childCompoundWrite(treePath); if (subMerge.isEmpty()) { return completeServerCache; - } else if (completeServerCache == null && !subMerge.hasCompleteWrite(Path.Empty)) { + } else if ( + completeServerCache == null && + !subMerge.hasCompleteWrite(Path.Empty) + ) { // We wouldn't have a complete snapshot, since there's no underlying data and no complete shadow return null; } else { @@ -230,15 +264,26 @@ export class WriteTree { return completeServerCache; } else { // If the server cache is null, and we don't have a complete cache, we need to return null - if (!includeHiddenWrites && completeServerCache == null && !merge.hasCompleteWrite(Path.Empty)) { + if ( + !includeHiddenWrites && + completeServerCache == null && + !merge.hasCompleteWrite(Path.Empty) + ) { return 
null; } else { - const filter = function (write: WriteRecord) { - return (write.visible || includeHiddenWrites) && - (!writeIdsToExclude || !~writeIdsToExclude.indexOf(write.writeId)) && - (write.path.contains(treePath) || treePath.contains(write.path)); + const filter = function(write: WriteRecord) { + return ( + (write.visible || includeHiddenWrites) && + (!writeIdsToExclude || + !~writeIdsToExclude.indexOf(write.writeId)) && + (write.path.contains(treePath) || treePath.contains(write.path)) + ); }; - const mergeAtPath = WriteTree.layerTree_(this.allWrites_, filter, treePath); + const mergeAtPath = WriteTree.layerTree_( + this.allWrites_, + filter, + treePath + ); const layeredCache = completeServerCache || ChildrenNode.EMPTY_NODE; return mergeAtPath.apply(layeredCache); } @@ -254,14 +299,23 @@ export class WriteTree { * @param {?ChildrenNode} completeServerChildren * @return {!ChildrenNode} */ - calcCompleteEventChildren(treePath: Path, completeServerChildren: ChildrenNode | null) { + calcCompleteEventChildren( + treePath: Path, + completeServerChildren: ChildrenNode | null + ) { let completeChildren = ChildrenNode.EMPTY_NODE as Node; const topLevelSet = this.visibleWrites_.getCompleteNode(treePath); if (topLevelSet) { if (!topLevelSet.isLeafNode()) { // we're shadowing everything. Return the children. 
- topLevelSet.forEachChild(PRIORITY_INDEX, function (childName, childSnap) { - completeChildren = completeChildren.updateImmediateChild(childName, childSnap); + topLevelSet.forEachChild(PRIORITY_INDEX, function( + childName, + childSnap + ) { + completeChildren = completeChildren.updateImmediateChild( + childName, + childSnap + ); }); } return completeChildren; @@ -269,21 +323,35 @@ export class WriteTree { // Layer any children we have on top of this // We know we don't have a top-level set, so just enumerate existing children const merge = this.visibleWrites_.childCompoundWrite(treePath); - completeServerChildren.forEachChild(PRIORITY_INDEX, function (childName, childNode) { - const node = merge.childCompoundWrite(new Path(childName)).apply(childNode); - completeChildren = completeChildren.updateImmediateChild(childName, node); + completeServerChildren.forEachChild(PRIORITY_INDEX, function( + childName, + childNode + ) { + const node = merge + .childCompoundWrite(new Path(childName)) + .apply(childNode); + completeChildren = completeChildren.updateImmediateChild( + childName, + node + ); }); // Add any complete children we have from the set - merge.getCompleteChildren().forEach(function (namedNode) { - completeChildren = completeChildren.updateImmediateChild(namedNode.name, namedNode.node); + merge.getCompleteChildren().forEach(function(namedNode) { + completeChildren = completeChildren.updateImmediateChild( + namedNode.name, + namedNode.node + ); }); return completeChildren; } else { // We don't have anything to layer on top of. 
Layer on any children we have // Note that we can return an empty snap if we have a defined delete const merge = this.visibleWrites_.childCompoundWrite(treePath); - merge.getCompleteChildren().forEach(function (namedNode) { - completeChildren = completeChildren.updateImmediateChild(namedNode.name, namedNode.node); + merge.getCompleteChildren().forEach(function(namedNode) { + completeChildren = completeChildren.updateImmediateChild( + namedNode.name, + namedNode.node + ); }); return completeChildren; } @@ -309,10 +377,16 @@ export class WriteTree { * @param {?Node} existingServerSnap * @return {?Node} */ - calcEventCacheAfterServerOverwrite(treePath: Path, childPath: Path, existingEventSnap: Node | null, - existingServerSnap: Node | null): Node | null { - assert(existingEventSnap || existingServerSnap, - 'Either existingEventSnap or existingServerSnap must exist'); + calcEventCacheAfterServerOverwrite( + treePath: Path, + childPath: Path, + existingEventSnap: Node | null, + existingServerSnap: Node | null + ): Node | null { + assert( + existingEventSnap || existingServerSnap, + 'Either existingEventSnap or existingServerSnap must exist' + ); const path = treePath.child(childPath); if (this.visibleWrites_.hasCompleteWrite(path)) { // At this point we can probably guarantee that we're in case 2, meaning no events @@ -345,7 +419,11 @@ export class WriteTree { * @param {!CacheNode} existingServerSnap * @return {?Node} */ - calcCompleteChild(treePath: Path, childKey: string, existingServerSnap: CacheNode): Node | null { + calcCompleteChild( + treePath: Path, + childKey: string, + existingServerSnap: CacheNode + ): Node | null { const path = treePath.child(childKey); const shadowingNode = this.visibleWrites_.getCompleteNode(path); if (shadowingNode != null) { @@ -353,7 +431,9 @@ export class WriteTree { } else { if (existingServerSnap.isCompleteForChild(childKey)) { const childMerge = this.visibleWrites_.childCompoundWrite(path); - return 
childMerge.apply(existingServerSnap.getNode().getImmediateChild(childKey)); + return childMerge.apply( + existingServerSnap.getNode().getImmediateChild(childKey) + ); } else { return null; } @@ -384,8 +464,14 @@ export class WriteTree { * @param {!Index} index * @return {!Array.} */ - calcIndexedSlice(treePath: Path, completeServerData: Node | null, startPost: NamedNode, count: number, - reverse: boolean, index: Index): NamedNode[] { + calcIndexedSlice( + treePath: Path, + completeServerData: Node | null, + startPost: NamedNode, + count: number, + reverse: boolean, + index: Index + ): NamedNode[] { let toIterate: Node; const merge = this.visibleWrites_.childCompoundWrite(treePath); const shadowingNode = merge.getCompleteNode(Path.Empty); @@ -401,8 +487,9 @@ export class WriteTree { if (!toIterate.isEmpty() && !toIterate.isLeafNode()) { const nodes = []; const cmp = index.getCompare(); - const iter = reverse ? (toIterate as ChildrenNode).getReverseIteratorFrom(startPost, index) : - (toIterate as ChildrenNode).getIteratorFrom(startPost, index); + const iter = reverse + ? (toIterate as ChildrenNode).getReverseIteratorFrom(startPost, index) + : (toIterate as ChildrenNode).getIteratorFrom(startPost, index); let next = iter.getNext(); while (next && nodes.length < count) { if (cmp(next, startPost) !== 0) { @@ -427,7 +514,10 @@ export class WriteTree { return writeRecord.path.contains(path); } else { // findKey can return undefined, so use !! 
to coerce to boolean - return !!findKey(writeRecord.children, function (childSnap: Node, childName: string) { + return !!findKey(writeRecord.children, function( + childSnap: Node, + childName: string + ) { return writeRecord.path.child(childName).contains(path); }); } @@ -438,8 +528,11 @@ export class WriteTree { * @private */ private resetTree_() { - this.visibleWrites_ = WriteTree.layerTree_(this.allWrites_, WriteTree.DefaultFilter_, - Path.Empty); + this.visibleWrites_ = WriteTree.layerTree_( + this.allWrites_, + WriteTree.DefaultFilter_, + Path.Empty + ); if (this.allWrites_.length > 0) { this.lastWriteId_ = this.allWrites_[this.allWrites_.length - 1].writeId; } else { @@ -468,7 +561,11 @@ export class WriteTree { * @return {!CompoundWrite} * @private */ - private static layerTree_(writes: WriteRecord[], filter: (w: WriteRecord) => boolean, treeRoot: Path): CompoundWrite { + private static layerTree_( + writes: WriteRecord[], + filter: (w: WriteRecord) => boolean, + treeRoot: Path + ): CompoundWrite { let compoundWrite = CompoundWrite.Empty; for (let i = 0; i < writes.length; ++i) { const write = writes[i]; @@ -484,18 +581,27 @@ export class WriteTree { compoundWrite = compoundWrite.addWrite(relativePath, write.snap); } else if (writePath.contains(treeRoot)) { relativePath = Path.relativePath(writePath, treeRoot); - compoundWrite = compoundWrite.addWrite(Path.Empty, write.snap.getChild(relativePath)); + compoundWrite = compoundWrite.addWrite( + Path.Empty, + write.snap.getChild(relativePath) + ); } else { // There is no overlap between root path and write path, ignore write } } else if (write.children) { if (treeRoot.contains(writePath)) { relativePath = Path.relativePath(treeRoot, writePath); - compoundWrite = compoundWrite.addWrites(relativePath, write.children); + compoundWrite = compoundWrite.addWrites( + relativePath, + write.children + ); } else if (writePath.contains(treeRoot)) { relativePath = Path.relativePath(writePath, treeRoot); if 
(relativePath.isEmpty()) { - compoundWrite = compoundWrite.addWrites(Path.Empty, write.children); + compoundWrite = compoundWrite.addWrites( + Path.Empty, + write.children + ); } else { const child = safeGet(write.children, relativePath.getFront()); if (child) { @@ -565,10 +671,17 @@ export class WriteTreeRef { * @param {boolean=} includeHiddenWrites Defaults to false, whether or not to layer on writes with visible set to false * @return {?Node} */ - calcCompleteEventCache(completeServerCache: Node | null, writeIdsToExclude?: number[], - includeHiddenWrites?: boolean): Node | null { - return this.writeTree_.calcCompleteEventCache(this.treePath_, completeServerCache, writeIdsToExclude, - includeHiddenWrites); + calcCompleteEventCache( + completeServerCache: Node | null, + writeIdsToExclude?: number[], + includeHiddenWrites?: boolean + ): Node | null { + return this.writeTree_.calcCompleteEventCache( + this.treePath_, + completeServerCache, + writeIdsToExclude, + includeHiddenWrites + ); } /** @@ -579,7 +692,10 @@ export class WriteTreeRef { * @return {!ChildrenNode} */ calcCompleteEventChildren(completeServerChildren: ChildrenNode | null): ChildrenNode { - return this.writeTree_.calcCompleteEventChildren(this.treePath_, completeServerChildren) as ChildrenNode; + return this.writeTree_.calcCompleteEventChildren( + this.treePath_, + completeServerChildren + ) as ChildrenNode; } /** @@ -601,9 +717,17 @@ export class WriteTreeRef { * @param {?Node} existingServerSnap * @return {?Node} */ - calcEventCacheAfterServerOverwrite(path: Path, existingEventSnap: Node | null, - existingServerSnap: Node | null): Node | null { - return this.writeTree_.calcEventCacheAfterServerOverwrite(this.treePath_, path, existingEventSnap, existingServerSnap); + calcEventCacheAfterServerOverwrite( + path: Path, + existingEventSnap: Node | null, + existingServerSnap: Node | null + ): Node | null { + return this.writeTree_.calcEventCacheAfterServerOverwrite( + this.treePath_, + path, + 
existingEventSnap, + existingServerSnap + ); } /** @@ -629,9 +753,21 @@ export class WriteTreeRef { * @param {!Index} index * @return {!Array.} */ - calcIndexedSlice(completeServerData: Node | null, startPost: NamedNode, count: number, - reverse: boolean, index: Index): NamedNode[] { - return this.writeTree_.calcIndexedSlice(this.treePath_, completeServerData, startPost, count, reverse, index); + calcIndexedSlice( + completeServerData: Node | null, + startPost: NamedNode, + count: number, + reverse: boolean, + index: Index + ): NamedNode[] { + return this.writeTree_.calcIndexedSlice( + this.treePath_, + completeServerData, + startPost, + count, + reverse, + index + ); } /** @@ -642,8 +778,15 @@ export class WriteTreeRef { * @param {!CacheNode} existingServerCache * @return {?Node} */ - calcCompleteChild(childKey: string, existingServerCache: CacheNode): Node | null { - return this.writeTree_.calcCompleteChild(this.treePath_, childKey, existingServerCache); + calcCompleteChild( + childKey: string, + existingServerCache: CacheNode + ): Node | null { + return this.writeTree_.calcCompleteChild( + this.treePath_, + childKey, + existingServerCache + ); } /** diff --git a/src/database/core/operation/AckUserWrite.ts b/src/database/core/operation/AckUserWrite.ts index 29e553fb82b..02b88466ea7 100644 --- a/src/database/core/operation/AckUserWrite.ts +++ b/src/database/core/operation/AckUserWrite.ts @@ -14,8 +14,8 @@ * limitations under the License. */ -import { assert } from "../../../utils/assert"; -import { Path } from "../util/Path"; +import { assert } from '../../../utils/assert'; +import { Path } from '../util/Path'; import { Operation, OperationSource, OperationType } from './Operation'; import { ImmutableTree } from '../util/ImmutableTree'; @@ -32,22 +32,31 @@ export class AckUserWrite implements Operation { * @param {!ImmutableTree} affectedTree A tree containing true for each affected path. Affected paths can't overlap. 
* @param {!boolean} revert */ - constructor(/**@inheritDoc */ public path: Path, - /**@inheritDoc */ public affectedTree: ImmutableTree, - /**@inheritDoc */ public revert: boolean) { - - } + constructor( + /**@inheritDoc */ public path: Path, + /**@inheritDoc */ public affectedTree: ImmutableTree, + /**@inheritDoc */ public revert: boolean + ) {} /** * @inheritDoc */ operationForChild(childName: string): AckUserWrite { if (!this.path.isEmpty()) { - assert(this.path.getFront() === childName, 'operationForChild called for unrelated child.'); - return new AckUserWrite(this.path.popFront(), this.affectedTree, this.revert); + assert( + this.path.getFront() === childName, + 'operationForChild called for unrelated child.' + ); + return new AckUserWrite( + this.path.popFront(), + this.affectedTree, + this.revert + ); } else if (this.affectedTree.value != null) { - assert(this.affectedTree.children.isEmpty(), - 'affectedTree should not have overlapping affected paths.'); + assert( + this.affectedTree.children.isEmpty(), + 'affectedTree should not have overlapping affected paths.' + ); // All child locations are affected as well; just return same operation. return this; } else { @@ -55,4 +64,4 @@ export class AckUserWrite implements Operation { return new AckUserWrite(Path.Empty, childTree, this.revert); } } -} \ No newline at end of file +} diff --git a/src/database/core/operation/ListenComplete.ts b/src/database/core/operation/ListenComplete.ts index 54a97dfdd44..d3acc097a5a 100644 --- a/src/database/core/operation/ListenComplete.ts +++ b/src/database/core/operation/ListenComplete.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { Path } from "../util/Path"; +import { Path } from '../util/Path'; import { Operation, OperationSource, OperationType } from './Operation'; /** @@ -27,8 +27,7 @@ export class ListenComplete implements Operation { /** @inheritDoc */ type = OperationType.LISTEN_COMPLETE; - constructor(public source: OperationSource, public path: Path) { - } + constructor(public source: OperationSource, public path: Path) {} operationForChild(childName: string): ListenComplete { if (this.path.isEmpty()) { diff --git a/src/database/core/operation/Merge.ts b/src/database/core/operation/Merge.ts index 60aa5aac5e4..fa3f114d5d1 100644 --- a/src/database/core/operation/Merge.ts +++ b/src/database/core/operation/Merge.ts @@ -15,9 +15,9 @@ */ import { Operation, OperationSource, OperationType } from './Operation'; -import { Overwrite } from "./Overwrite"; -import { Path } from "../util/Path"; -import { assert } from "../../../utils/assert"; +import { Overwrite } from './Overwrite'; +import { Path } from '../util/Path'; +import { assert } from '../../../utils/assert'; import { ImmutableTree } from '../util/ImmutableTree'; import { Node } from '../snap/Node'; @@ -32,10 +32,11 @@ export class Merge implements Operation { /** @inheritDoc */ type = OperationType.MERGE; - constructor(/**@inheritDoc */ public source: OperationSource, - /**@inheritDoc */ public path: Path, - /**@inheritDoc */ public children: ImmutableTree) { - } + constructor( + /**@inheritDoc */ public source: OperationSource, + /**@inheritDoc */ public path: Path, + /**@inheritDoc */ public children: ImmutableTree + ) {} /** * @inheritDoc @@ -54,8 +55,10 @@ export class Merge implements Operation { return new Merge(this.source, Path.Empty, childTree); } } else { - assert(this.path.getFront() === childName, - 'Can\'t get a merge for a child not on the path of the operation'); + assert( + this.path.getFront() === childName, + "Can't get a merge for a child not on the path of the operation" + ); return new 
Merge(this.source, this.path.popFront(), this.children); } } @@ -64,6 +67,14 @@ export class Merge implements Operation { * @inheritDoc */ toString(): string { - return 'Operation(' + this.path + ': ' + this.source.toString() + ' merge: ' + this.children.toString() + ')'; + return ( + 'Operation(' + + this.path + + ': ' + + this.source.toString() + + ' merge: ' + + this.children.toString() + + ')' + ); } -} \ No newline at end of file +} diff --git a/src/database/core/operation/Operation.ts b/src/database/core/operation/Operation.ts index 16b55f26eee..6ceffaed92b 100644 --- a/src/database/core/operation/Operation.ts +++ b/src/database/core/operation/Operation.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { assert } from "../../../utils/assert"; +import { assert } from '../../../utils/assert'; import { Path } from '../util/Path'; /** @@ -62,29 +62,46 @@ export interface Operation { * @constructor */ export class OperationSource { - constructor(public fromUser: boolean, - public fromServer: boolean, - public queryId: string | null, - public tagged: boolean) { + constructor( + public fromUser: boolean, + public fromServer: boolean, + public queryId: string | null, + public tagged: boolean + ) { assert(!tagged || fromServer, 'Tagged queries must be from server.'); } /** * @const * @type {!OperationSource} */ - static User = new OperationSource(/*fromUser=*/true, false, null, /*tagged=*/false); + static User = new OperationSource( + /*fromUser=*/ true, + false, + null, + /*tagged=*/ false + ); /** * @const * @type {!OperationSource} */ - static Server = new OperationSource(false, /*fromServer=*/true, null, /*tagged=*/false); + static Server = new OperationSource( + false, + /*fromServer=*/ true, + null, + /*tagged=*/ false + ); /** * @param {string} queryId * @return {!OperationSource} */ static forServerTaggedQuery = function(queryId: string): OperationSource { - return new OperationSource(false, /*fromServer=*/true, queryId, /*tagged=*/true); + 
return new OperationSource( + false, + /*fromServer=*/ true, + queryId, + /*tagged=*/ true + ); }; -} \ No newline at end of file +} diff --git a/src/database/core/operation/Overwrite.ts b/src/database/core/operation/Overwrite.ts index 1a37522081b..8ffd735eadf 100644 --- a/src/database/core/operation/Overwrite.ts +++ b/src/database/core/operation/Overwrite.ts @@ -15,7 +15,7 @@ */ import { Operation, OperationSource, OperationType } from './Operation'; -import { Path } from "../util/Path"; +import { Path } from '../util/Path'; import { Node } from '../snap/Node'; /** @@ -29,17 +29,21 @@ export class Overwrite implements Operation { /** @inheritDoc */ type = OperationType.OVERWRITE; - constructor(public source: OperationSource, - public path: Path, - public snap: Node) { - } + constructor( + public source: OperationSource, + public path: Path, + public snap: Node + ) {} operationForChild(childName: string): Overwrite { if (this.path.isEmpty()) { - return new Overwrite(this.source, Path.Empty, - this.snap.getImmediateChild(childName)); + return new Overwrite( + this.source, + Path.Empty, + this.snap.getImmediateChild(childName) + ); } else { return new Overwrite(this.source, this.path.popFront(), this.snap); } } -} \ No newline at end of file +} diff --git a/src/database/core/snap/ChildrenNode.ts b/src/database/core/snap/ChildrenNode.ts index 22f1ecccec6..2133906403c 100644 --- a/src/database/core/snap/ChildrenNode.ts +++ b/src/database/core/snap/ChildrenNode.ts @@ -14,20 +14,15 @@ * limitations under the License. 
*/ -import { assert } from "../../../utils/assert"; -import { - sha1, - MAX_NAME, - MIN_NAME -} from '../util/util'; +import { assert } from '../../../utils/assert'; +import { sha1, MAX_NAME, MIN_NAME } from '../util/util'; import { SortedMap, SortedMapIterator } from '../util/SortedMap'; import { Node, NamedNode } from './Node'; +import { validatePriorityNode, priorityHashText, setMaxNode } from './snap'; import { - validatePriorityNode, - priorityHashText, - setMaxNode -} from './snap'; -import { PRIORITY_INDEX, setMaxNode as setPriorityMaxNode } from './indexes/PriorityIndex'; + PRIORITY_INDEX, + setMaxNode as setPriorityMaxNode +} from './indexes/PriorityIndex'; import { KEY_INDEX, KeyIndex } from './indexes/KeyIndex'; import { IndexMap } from './IndexMap'; import { LeafNode } from './LeafNode'; @@ -36,7 +31,11 @@ import { Index } from './indexes/Index'; import { Path } from '../util/Path'; export interface ChildrenNodeConstructor { - new(children_: SortedMap, priorityNode_: Node | null, indexMap_: IndexMap): ChildrenNode; + new ( + children_: SortedMap, + priorityNode_: Node | null, + indexMap_: IndexMap + ): ChildrenNode; EMPTY_NODE: ChildrenNode; } @@ -56,7 +55,14 @@ export class ChildrenNode implements Node { private lazyHash_: string | null = null; static get EMPTY_NODE(): ChildrenNode { - return EMPTY_NODE || (EMPTY_NODE = new ChildrenNode(new SortedMap(NAME_COMPARATOR), null, IndexMap.Default)); + return ( + EMPTY_NODE || + (EMPTY_NODE = new ChildrenNode( + new SortedMap(NAME_COMPARATOR), + null, + IndexMap.Default + )) + ); } /** @@ -66,10 +72,11 @@ export class ChildrenNode implements Node { * @param {?Node} priorityNode_ The priority of this node (as a snapshot node). 
* @param {!IndexMap} indexMap_ */ - constructor(private readonly children_: SortedMap, - private readonly priorityNode_: Node | null, - private indexMap_: IndexMap) { - + constructor( + private readonly children_: SortedMap, + private readonly priorityNode_: Node | null, + private indexMap_: IndexMap + ) { /** * Note: The only reason we allow null priority is for EMPTY_NODE, since we can't use * EMPTY_NODE as the priority of EMPTY_NODE. We might want to consider making EMPTY_NODE its own @@ -80,7 +87,10 @@ export class ChildrenNode implements Node { } if (this.children_.isEmpty()) { - assert(!this.priorityNode_ || this.priorityNode_.isEmpty(), 'An empty node cannot have a priority'); + assert( + !this.priorityNode_ || this.priorityNode_.isEmpty(), + 'An empty node cannot have a priority' + ); } } @@ -118,8 +128,7 @@ export class ChildrenNode implements Node { /** @inheritDoc */ getChild(path: Path): Node { const front = path.getFront(); - if (front === null) - return this; + if (front === null) return this; return this.getImmediateChild(front).getChild(path.popFront()); } @@ -139,7 +148,9 @@ export class ChildrenNode implements Node { let newChildren, newIndexMap, newPriority; if (newChildNode.isEmpty()) { newChildren = this.children_.remove(childName); - newIndexMap = this.indexMap_.removeFromIndexes(namedNode, this.children_ + newIndexMap = this.indexMap_.removeFromIndexes( + namedNode, + this.children_ ); } else { newChildren = this.children_.insert(childName, newChildNode); @@ -157,9 +168,14 @@ export class ChildrenNode implements Node { if (front === null) { return newChildNode; } else { - assert(path.getFront() !== '.priority' || path.getLength() === 1, - '.priority must be the last token in a path'); - const newImmediateChild = this.getImmediateChild(front).updateChild(path.popFront(), newChildNode); + assert( + path.getFront() !== '.priority' || path.getLength() === 1, + '.priority must be the last token in a path' + ); + const newImmediateChild = 
this.getImmediateChild(front).updateChild( + path.popFront(), + newChildNode + ); return this.updateImmediateChild(front, newImmediateChild); } } @@ -182,12 +198,13 @@ export class ChildrenNode implements Node { /** @inheritDoc */ val(exportFormat?: boolean): object { - if (this.isEmpty()) - return null; + if (this.isEmpty()) return null; const obj: { [k: string]: Object } = {}; - let numKeys = 0, maxKey = 0, allIntegerKeys = true; - this.forEachChild(PRIORITY_INDEX, function (key: string, childNode: Node) { + let numKeys = 0, + maxKey = 0, + allIntegerKeys = true; + this.forEachChild(PRIORITY_INDEX, function(key: string, childNode: Node) { obj[key] = childNode.val(exportFormat); numKeys++; @@ -201,8 +218,7 @@ export class ChildrenNode implements Node { if (!exportFormat && allIntegerKeys && maxKey < 2 * numKeys) { // convert to array. const array: Object[] = []; - for (let key in obj) - array[key as any as number] = obj[key]; + for (let key in obj) array[(key as any) as number] = obj[key]; return array; } else { @@ -213,32 +229,37 @@ export class ChildrenNode implements Node { } } - /** @inheritDoc */ hash(): string { if (this.lazyHash_ === null) { let toHash = ''; if (!this.getPriority().isEmpty()) - toHash += 'priority:' + priorityHashText( - (this.getPriority().val() as string | number)) + ':'; + toHash += + 'priority:' + + priorityHashText(this.getPriority().val() as string | number) + + ':'; - this.forEachChild(PRIORITY_INDEX, function (key, childNode) { + this.forEachChild(PRIORITY_INDEX, function(key, childNode) { const childHash = childNode.hash(); - if (childHash !== '') - toHash += ':' + key + ':' + childHash; + if (childHash !== '') toHash += ':' + key + ':' + childHash; }); - this.lazyHash_ = (toHash === '') ? '' : sha1(toHash); + this.lazyHash_ = toHash === '' ? 
'' : sha1(toHash); } return this.lazyHash_; } - /** @inheritDoc */ - getPredecessorChildName(childName: string, childNode: Node, index: Index): string { + getPredecessorChildName( + childName: string, + childNode: Node, + index: Index + ): string { const idx = this.resolveIndex_(index); if (idx) { - const predecessor = idx.getPredecessorKey(new NamedNode(childName, childNode)); + const predecessor = idx.getPredecessorKey( + new NamedNode(childName, childNode) + ); return predecessor ? predecessor.name : null; } else { return this.children_.getPredecessorKey(childName); @@ -300,14 +321,13 @@ export class ChildrenNode implements Node { } } - /** * @inheritDoc */ forEachChild(index: Index, action: (key: string, node: Node) => void): any { const idx = this.resolveIndex_(index); if (idx) { - return idx.inorderTraversal(function (wrappedNode) { + return idx.inorderTraversal(function(wrappedNode) { return action(wrappedNode.name, wrappedNode.node); }); } else { @@ -319,7 +339,9 @@ export class ChildrenNode implements Node { * @param {!Index} indexDefinition * @return {SortedMapIterator} */ - getIterator(indexDefinition: Index): SortedMapIterator { + getIterator( + indexDefinition: Index + ): SortedMapIterator { return this.getIteratorFrom(indexDefinition.minPost(), indexDefinition); } @@ -329,12 +351,18 @@ export class ChildrenNode implements Node { * @param {!Index} indexDefinition * @return {!SortedMapIterator} */ - getIteratorFrom(startPost: NamedNode, indexDefinition: Index): SortedMapIterator { + getIteratorFrom( + startPost: NamedNode, + indexDefinition: Index + ): SortedMapIterator { const idx = this.resolveIndex_(indexDefinition); if (idx) { - return idx.getIteratorFrom(startPost, (key) => key); + return idx.getIteratorFrom(startPost, key => key); } else { - const iterator = this.children_.getIteratorFrom(startPost.name, NamedNode.Wrap); + const iterator = this.children_.getIteratorFrom( + startPost.name, + NamedNode.Wrap + ); let next = iterator.peek(); while 
(next != null && indexDefinition.compare(next, startPost) < 0) { iterator.getNext(); @@ -348,8 +376,13 @@ export class ChildrenNode implements Node { * @param {!Index} indexDefinition * @return {!SortedMapIterator} */ - getReverseIterator(indexDefinition: Index): SortedMapIterator { - return this.getReverseIteratorFrom(indexDefinition.maxPost(), indexDefinition); + getReverseIterator( + indexDefinition: Index + ): SortedMapIterator { + return this.getReverseIteratorFrom( + indexDefinition.maxPost(), + indexDefinition + ); } /** @@ -357,13 +390,20 @@ export class ChildrenNode implements Node { * @param {!Index} indexDefinition * @return {!SortedMapIterator} */ - getReverseIteratorFrom(endPost: NamedNode, - indexDefinition: Index): SortedMapIterator { + getReverseIteratorFrom( + endPost: NamedNode, + indexDefinition: Index + ): SortedMapIterator { const idx = this.resolveIndex_(indexDefinition); if (idx) { - return idx.getReverseIteratorFrom(endPost, function (key) { return key; }); + return idx.getReverseIteratorFrom(endPost, function(key) { + return key; + }); } else { - const iterator = this.children_.getReverseIteratorFrom(endPost.name, NamedNode.Wrap); + const iterator = this.children_.getReverseIteratorFrom( + endPost.name, + NamedNode.Wrap + ); let next = iterator.peek(); while (next != null && indexDefinition.compare(next, endPost) > 0) { iterator.getNext(); @@ -397,10 +437,16 @@ export class ChildrenNode implements Node { * @inheritDoc */ withIndex(indexDefinition: Index): Node { - if (indexDefinition === KEY_INDEX || this.indexMap_.hasIndex(indexDefinition)) { + if ( + indexDefinition === KEY_INDEX || + this.indexMap_.hasIndex(indexDefinition) + ) { return this; } else { - const newIndexMap = this.indexMap_.addIndex(indexDefinition, this.children_); + const newIndexMap = this.indexMap_.addIndex( + indexDefinition, + this.children_ + ); return new ChildrenNode(this.children_, this.priorityNode_, newIndexMap); } } @@ -418,20 +464,24 @@ export class 
ChildrenNode implements Node { equals(other: Node): boolean { if (other === this) { return true; - } - else if (other.isLeafNode()) { + } else if (other.isLeafNode()) { return false; } else { const otherChildrenNode = other as ChildrenNode; if (!this.getPriority().equals(otherChildrenNode.getPriority())) { return false; - } else if (this.children_.count() === otherChildrenNode.children_.count()) { + } else if ( + this.children_.count() === otherChildrenNode.children_.count() + ) { const thisIter = this.getIterator(PRIORITY_INDEX); const otherIter = otherChildrenNode.getIterator(PRIORITY_INDEX); let thisCurrent = thisIter.getNext(); let otherCurrent = otherIter.getNext(); while (thisCurrent && otherCurrent) { - if (thisCurrent.name !== otherCurrent.name || !thisCurrent.node.equals(otherCurrent.node)) { + if ( + thisCurrent.name !== otherCurrent.name || + !thisCurrent.node.equals(otherCurrent.node) + ) { return false; } thisCurrent = thisIter.getNext(); @@ -444,7 +494,6 @@ export class ChildrenNode implements Node { } } - /** * Returns a SortedMap ordered by index, or null if the default (by-key) ordering can be used * instead. 
@@ -453,14 +502,15 @@ export class ChildrenNode implements Node { * @param {!Index} indexDefinition * @return {?SortedMap.} */ - private resolveIndex_(indexDefinition: Index): SortedMap | null { + private resolveIndex_( + indexDefinition: Index + ): SortedMap | null { if (indexDefinition === KEY_INDEX) { return null; } else { return this.indexMap_.get(indexDefinition.toString()); } } - } /** @@ -470,7 +520,11 @@ export class ChildrenNode implements Node { */ export class MaxNode extends ChildrenNode { constructor() { - super(new SortedMap(NAME_COMPARATOR), ChildrenNode.EMPTY_NODE, IndexMap.Default); + super( + new SortedMap(NAME_COMPARATOR), + ChildrenNode.EMPTY_NODE, + IndexMap.Default + ); } compareTo(other: Node): number { @@ -481,23 +535,19 @@ export class MaxNode extends ChildrenNode { } } - equals(other: Node): boolean { // Not that we every compare it, but MAX_NODE is only ever equal to itself return other === this; } - getPriority(): MaxNode { return this; } - getImmediateChild(childName: string): ChildrenNode { return ChildrenNode.EMPTY_NODE; } - isEmpty(): boolean { return false; } @@ -515,8 +565,8 @@ export const MAX_NODE = new MaxNode(); */ declare module './Node' { interface NamedNode { - MIN: NamedNode, - MAX: NamedNode + MIN: NamedNode; + MAX: NamedNode; } } @@ -535,4 +585,4 @@ Object.defineProperties(NamedNode, { KeyIndex.__EMPTY_NODE = ChildrenNode.EMPTY_NODE; LeafNode.__childrenNodeConstructor = ChildrenNode; setMaxNode(MAX_NODE); -setPriorityMaxNode(MAX_NODE); \ No newline at end of file +setPriorityMaxNode(MAX_NODE); diff --git a/src/database/core/snap/IndexMap.ts b/src/database/core/snap/IndexMap.ts index 0b8ce7bfc49..e4f60f10b5b 100644 --- a/src/database/core/snap/IndexMap.ts +++ b/src/database/core/snap/IndexMap.ts @@ -40,17 +40,25 @@ export class IndexMap { * @const */ static get Default(): IndexMap { - assert(fallbackObject && PRIORITY_INDEX, 'ChildrenNode.ts has not been loaded'); - _defaultIndexMap = _defaultIndexMap || new IndexMap( - 
{'.priority': fallbackObject}, - {'.priority': PRIORITY_INDEX} + assert( + fallbackObject && PRIORITY_INDEX, + 'ChildrenNode.ts has not been loaded' ); + _defaultIndexMap = + _defaultIndexMap || + new IndexMap( + { '.priority': fallbackObject }, + { '.priority': PRIORITY_INDEX } + ); return _defaultIndexMap; } - constructor(private indexes_: { [k: string]: SortedMap | /*FallbackType*/object }, - private indexSet_: { [k: string]: Index }) { - } + constructor( + private indexes_: { + [k: string]: SortedMap | /*FallbackType*/ object; + }, + private indexSet_: { [k: string]: Index } + ) {} /** * @@ -83,15 +91,21 @@ export class IndexMap { * @param {!SortedMap.} existingChildren * @return {!IndexMap} */ - addIndex(indexDefinition: Index, existingChildren: SortedMap): IndexMap { - assert(indexDefinition !== KEY_INDEX, - 'KeyIndex always exists and isn\'t meant to be added to the IndexMap.'); + addIndex( + indexDefinition: Index, + existingChildren: SortedMap + ): IndexMap { + assert( + indexDefinition !== KEY_INDEX, + "KeyIndex always exists and isn't meant to be added to the IndexMap." 
+ ); const childList = []; let sawIndexedValue = false; const iter = existingChildren.getIterator(NamedNode.Wrap); let next = iter.getNext(); while (next) { - sawIndexedValue = sawIndexedValue || indexDefinition.isDefinedOn(next.node); + sawIndexedValue = + sawIndexedValue || indexDefinition.isDefinedOn(next.node); childList.push(next); next = iter.getNext(); } @@ -109,45 +123,52 @@ export class IndexMap { return new IndexMap(newIndexes, newIndexSet); } - /** * Ensure that this node is properly tracked in any indexes that we're maintaining * @param {!NamedNode} namedNode * @param {!SortedMap.} existingChildren * @return {!IndexMap} */ - addToIndexes(namedNode: NamedNode, existingChildren: SortedMap): IndexMap { - const newIndexes = map(this.indexes_, (indexedChildren: SortedMap, indexName: string) => { - const index = safeGet(this.indexSet_, indexName); - assert(index, 'Missing index implementation for ' + indexName); - if (indexedChildren === fallbackObject) { - // Check to see if we need to index everything - if (index.isDefinedOn(namedNode.node)) { - // We need to build this index - const childList = []; - const iter = existingChildren.getIterator(NamedNode.Wrap); - let next = iter.getNext(); - while (next) { - if (next.name != namedNode.name) { - childList.push(next); + addToIndexes( + namedNode: NamedNode, + existingChildren: SortedMap + ): IndexMap { + const newIndexes = map( + this.indexes_, + (indexedChildren: SortedMap, indexName: string) => { + const index = safeGet(this.indexSet_, indexName); + assert(index, 'Missing index implementation for ' + indexName); + if (indexedChildren === fallbackObject) { + // Check to see if we need to index everything + if (index.isDefinedOn(namedNode.node)) { + // We need to build this index + const childList = []; + const iter = existingChildren.getIterator(NamedNode.Wrap); + let next = iter.getNext(); + while (next) { + if (next.name != namedNode.name) { + childList.push(next); + } + next = iter.getNext(); } - next = 
iter.getNext(); + childList.push(namedNode); + return buildChildSet(childList, index.getCompare()); + } else { + // No change, this remains a fallback + return fallbackObject; } - childList.push(namedNode); - return buildChildSet(childList, index.getCompare()); } else { - // No change, this remains a fallback - return fallbackObject; - } - } else { - const existingSnap = existingChildren.get(namedNode.name); - let newChildren = indexedChildren; - if (existingSnap) { - newChildren = newChildren.remove(new NamedNode(namedNode.name, existingSnap)); + const existingSnap = existingChildren.get(namedNode.name); + let newChildren = indexedChildren; + if (existingSnap) { + newChildren = newChildren.remove( + new NamedNode(namedNode.name, existingSnap) + ); + } + return newChildren.insert(namedNode, namedNode.node); } - return newChildren.insert(namedNode, namedNode.node); } - }); + ); return new IndexMap(newIndexes, this.indexSet_); } @@ -157,15 +178,22 @@ export class IndexMap { * @param {!SortedMap.} existingChildren * @return {!IndexMap} */ - removeFromIndexes(namedNode: NamedNode, existingChildren: SortedMap): IndexMap { - const newIndexes = map(this.indexes_, function (indexedChildren: SortedMap) { + removeFromIndexes( + namedNode: NamedNode, + existingChildren: SortedMap + ): IndexMap { + const newIndexes = map(this.indexes_, function( + indexedChildren: SortedMap + ) { if (indexedChildren === fallbackObject) { // This is the fallback. 
Just return it, nothing to do in this case return indexedChildren; } else { const existingSnap = existingChildren.get(namedNode.name); if (existingSnap) { - return indexedChildren.remove(new NamedNode(namedNode.name, existingSnap)); + return indexedChildren.remove( + new NamedNode(namedNode.name, existingSnap) + ); } else { // No record of this child return indexedChildren; diff --git a/src/database/core/snap/LeafNode.ts b/src/database/core/snap/LeafNode.ts index 0f8e6aa9c0c..bd4caae1d51 100644 --- a/src/database/core/snap/LeafNode.ts +++ b/src/database/core/snap/LeafNode.ts @@ -14,15 +14,9 @@ * limitations under the License. */ -import { assert } from '../../../utils/assert' -import { - doubleToIEEE754String, - sha1 -} from '../util/util'; -import { - priorityHashText, - validatePriorityNode -} from './snap'; +import { assert } from '../../../utils/assert'; +import { doubleToIEEE754String, sha1 } from '../util/util'; +import { priorityHashText, validatePriorityNode } from './snap'; import { Node } from './Node'; import { Path } from '../util/Path'; import { Index } from './indexes/Index'; @@ -60,10 +54,14 @@ export class LeafNode implements Node { * The object type is possible in the event of a deferred value * @param {!Node=} priorityNode_ The priority of this node. */ - constructor(private readonly value_: string | number | boolean | object, - private priorityNode_: Node = LeafNode.__childrenNodeConstructor.EMPTY_NODE) { - assert(this.value_ !== undefined && this.value_ !== null, - 'LeafNode shouldn\'t be created with null/undefined value.'); + constructor( + private readonly value_: string | number | boolean | object, + private priorityNode_: Node = LeafNode.__childrenNodeConstructor.EMPTY_NODE + ) { + assert( + this.value_ !== undefined && this.value_ !== null, + "LeafNode shouldn't be created with null/undefined value." 
+ ); validatePriorityNode(this.priorityNode_); } @@ -137,10 +135,18 @@ export class LeafNode implements Node { } else if (newChildNode.isEmpty() && front !== '.priority') { return this; } else { - assert(front !== '.priority' || path.getLength() === 1, - '.priority must be the last token in a path'); + assert( + front !== '.priority' || path.getLength() === 1, + '.priority must be the last token in a path' + ); - return this.updateImmediateChild(front, LeafNode.__childrenNodeConstructor.EMPTY_NODE.updateChild(path.popFront(), newChildNode)); + return this.updateImmediateChild( + front, + LeafNode.__childrenNodeConstructor.EMPTY_NODE.updateChild( + path.popFront(), + newChildNode + ) + ); } } @@ -164,9 +170,11 @@ export class LeafNode implements Node { */ val(exportFormat?: boolean): Object { if (exportFormat && !this.getPriority().isEmpty()) - return {'.value': this.getValue(), '.priority': this.getPriority().val()}; - else - return this.getValue(); + return { + '.value': this.getValue(), + '.priority': this.getPriority().val() + }; + else return this.getValue(); } /** @inheritDoc */ @@ -174,8 +182,10 @@ export class LeafNode implements Node { if (this.lazyHash_ === null) { let toHash = ''; if (!this.priorityNode_.isEmpty()) - toHash += 'priority:' + priorityHashText( - (this.priorityNode_.val() as number|string)) + ':'; + toHash += + 'priority:' + + priorityHashText(this.priorityNode_.val() as number | string) + + ':'; const type = typeof this.value_; toHash += type + ':'; @@ -267,12 +277,14 @@ export class LeafNode implements Node { */ if (other === this) { return true; - } - else if (other.isLeafNode()) { + } else if (other.isLeafNode()) { const otherLeaf = other as LeafNode; - return this.value_ === otherLeaf.value_ && this.priorityNode_.equals(otherLeaf.priorityNode_); + return ( + this.value_ === otherLeaf.value_ && + this.priorityNode_.equals(otherLeaf.priorityNode_) + ); } else { return false; } } -} \ No newline at end of file +} diff --git 
a/src/database/core/snap/Node.ts b/src/database/core/snap/Node.ts index c8483b63278..48c53ee1deb 100644 --- a/src/database/core/snap/Node.ts +++ b/src/database/core/snap/Node.ts @@ -14,8 +14,8 @@ * limitations under the License. */ -import { Path } from "../util/Path"; -import { Index } from "./indexes/Index"; +import { Path } from '../util/Path'; +import { Index } from './indexes/Index'; /** * Node is an interface defining the common functionality for nodes in @@ -30,14 +30,12 @@ export interface Node { */ isLeafNode(): boolean; - /** * Gets the priority of the node. * @return {!Node} The priority of the node. */ getPriority(): Node; - /** * Returns a duplicate node with the new priority. * @param {!Node} newPriorityNode New priority to set for the node. @@ -45,7 +43,6 @@ export interface Node { */ updatePriority(newPriorityNode: Node): Node; - /** * Returns the specified immediate child, or null if it doesn't exist. * @param {string} childName The name of the child to retrieve. @@ -53,7 +50,6 @@ export interface Node { */ getImmediateChild(childName: string): Node; - /** * Returns a child by path, or null if it doesn't exist. * @param {!Path} path The path of the child to retrieve. @@ -61,7 +57,6 @@ export interface Node { */ getChild(path: Path): Node; - /** * Returns the name of the child immediately prior to the specified childNode, or null. * @param {!string} childName The name of the child to find the predecessor of. @@ -69,7 +64,11 @@ export interface Node { * @param {!Index} index The index to use to determine the predecessor * @return {?string} The name of the predecessor child, or null if childNode is the first child. */ - getPredecessorChildName(childName: String, childNode: Node, index: Index): string | null; + getPredecessorChildName( + childName: String, + childNode: Node, + index: Index + ): string | null; /** * Returns a duplicate node, with the specified immediate child updated. 
@@ -80,7 +79,6 @@ export interface Node { */ updateImmediateChild(childName: string, newChildNode: Node): Node; - /** * Returns a duplicate node, with the specified child updated. Any value will * be removed. @@ -102,13 +100,11 @@ export interface Node { */ isEmpty(): boolean; - /** * @return {number} The number of children of this node. */ numChildren(): number; - /** * Calls action for each child. * @param {!Index} index @@ -174,4 +170,3 @@ export class NamedNode { return new NamedNode(name, node); } } - diff --git a/src/database/core/snap/childSet.ts b/src/database/core/snap/childSet.ts index 5d6e3f61947..3cb25725365 100644 --- a/src/database/core/snap/childSet.ts +++ b/src/database/core/snap/childSet.ts @@ -32,7 +32,8 @@ class Base12Num { * @param {number} length */ constructor(length: number) { - const logBase2 = (num: number) => parseInt((Math.log(num) / LOG_2 as any), 10); + const logBase2 = (num: number) => + parseInt((Math.log(num) / LOG_2) as any, 10); const bitMask = (bits: number) => parseInt(Array(bits + 1).join('1'), 2); this.count = logBase2(length + 1); this.current_ = this.count - 1; @@ -66,13 +67,18 @@ class Base12Num { * @param {(function(K, K):number)=} mapSortFn An optional override for comparator used by the generated sorted map * @return {SortedMap.} */ -export const buildChildSet = function(childList: NamedNode[], - cmp: (a: NamedNode, b: NamedNode) => number, - keyFn?: (a: NamedNode) => K, - mapSortFn?: (a: K, b: K) => number): SortedMap { +export const buildChildSet = function( + childList: NamedNode[], + cmp: (a: NamedNode, b: NamedNode) => number, + keyFn?: (a: NamedNode) => K, + mapSortFn?: (a: K, b: K) => number +): SortedMap { childList.sort(cmp); - const buildBalancedTree = function(low: number, high: number): LLRBNode | null { + const buildBalancedTree = function( + low: number, + high: number + ): LLRBNode | null { const length = high - low; let namedNode: NamedNode; let key: K; @@ -80,34 +86,48 @@ export const buildChildSet = 
function(childList: NamedNode[], return null; } else if (length == 1) { namedNode = childList[low]; - key = keyFn ? keyFn(namedNode) : namedNode as any as K; - return new LLRBNode(key, namedNode.node as any as V, LLRBNode.BLACK, null, null); + key = keyFn ? keyFn(namedNode) : (namedNode as any) as K; + return new LLRBNode( + key, + (namedNode.node as any) as V, + LLRBNode.BLACK, + null, + null + ); } else { - const middle = parseInt((length / 2 as any), 10) + low; + const middle = parseInt((length / 2) as any, 10) + low; const left = buildBalancedTree(low, middle); const right = buildBalancedTree(middle + 1, high); namedNode = childList[middle]; - key = keyFn ? keyFn(namedNode) : namedNode as any as K; - return new LLRBNode(key, namedNode.node as any as V, LLRBNode.BLACK, left, right); + key = keyFn ? keyFn(namedNode) : (namedNode as any) as K; + return new LLRBNode( + key, + (namedNode.node as any) as V, + LLRBNode.BLACK, + left, + right + ); } }; - const buildFrom12Array = function (base12: Base12Num): LLRBNode { + const buildFrom12Array = function(base12: Base12Num): LLRBNode { let node: LLRBNode = null; let root = null; let index = childList.length; - const buildPennant = function (chunkSize: number, color: boolean) { + const buildPennant = function(chunkSize: number, color: boolean) { const low = index - chunkSize; const high = index; index -= chunkSize; const childTree = buildBalancedTree(low + 1, high); const namedNode = childList[low]; - const key: K = keyFn ? keyFn(namedNode) : namedNode as any as K; - attachPennant(new LLRBNode(key, namedNode.node as any as V, color, null, childTree)); + const key: K = keyFn ? 
keyFn(namedNode) : (namedNode as any) as K; + attachPennant( + new LLRBNode(key, (namedNode.node as any) as V, color, null, childTree) + ); }; - const attachPennant = function (pennant: LLRBNode) { + const attachPennant = function(pennant: LLRBNode) { if (node) { node.left = pennant; node = pennant; @@ -136,4 +156,4 @@ export const buildChildSet = function(childList: NamedNode[], const root = buildFrom12Array(base12); return new SortedMap(mapSortFn || (cmp as any), root); -}; \ No newline at end of file +}; diff --git a/src/database/core/snap/comparators.ts b/src/database/core/snap/comparators.ts index d1a4914c0c2..a3d5c7f72ad 100644 --- a/src/database/core/snap/comparators.ts +++ b/src/database/core/snap/comparators.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { nameCompare } from "../util/util"; +import { nameCompare } from '../util/util'; import { NamedNode } from './Node'; export function NAME_ONLY_COMPARATOR(left: NamedNode, right: NamedNode) { diff --git a/src/database/core/snap/indexes/Index.ts b/src/database/core/snap/indexes/Index.ts index e962c181a0b..60a375c47da 100644 --- a/src/database/core/snap/indexes/Index.ts +++ b/src/database/core/snap/indexes/Index.ts @@ -14,8 +14,8 @@ * limitations under the License. */ -import { Node, NamedNode } from "../Node"; -import { MIN_NAME, MAX_NAME } from "../../util/util"; +import { Node, NamedNode } from '../Node'; +import { MIN_NAME, MAX_NAME } from '../../util/util'; import { Comparator } from '../../util/SortedMap'; /** @@ -36,7 +36,6 @@ export abstract class Index { */ abstract isDefinedOn(node: Node): boolean; - /** * @return {function(!NamedNode, !NamedNode):number} A standalone comparison function for * this index @@ -45,7 +44,6 @@ export abstract class Index { return this.compare.bind(this); } - /** * Given a before and after value for a node, determine if the indexed value has changed. 
Even if they are different, * it's possible that the changes are isolated to parts of the snapshot that are not indexed. @@ -60,7 +58,6 @@ export abstract class Index { return this.compare(oldWrapped, newWrapped) !== 0; } - /** * @return {!NamedNode} a node wrapper that will sort equal to or less than * any other node wrapper, using this index @@ -69,14 +66,12 @@ export abstract class Index { return (NamedNode as any).MIN; } - /** * @return {!NamedNode} a node wrapper that will sort greater than or equal to * any other node wrapper, using this index */ abstract maxPost(): NamedNode; - /** * @param {*} indexValue * @param {string} name @@ -84,7 +79,6 @@ export abstract class Index { */ abstract makePost(indexValue: any, name: string): NamedNode; - /** * @return {!string} String representation for inclusion in a query spec */ diff --git a/src/database/core/snap/indexes/KeyIndex.ts b/src/database/core/snap/indexes/KeyIndex.ts index adaa82cdbf5..40a71cd1e0e 100644 --- a/src/database/core/snap/indexes/KeyIndex.ts +++ b/src/database/core/snap/indexes/KeyIndex.ts @@ -14,11 +14,11 @@ * limitations under the License. */ -import { Index } from "./Index"; -import { Node, NamedNode } from "../Node"; -import { nameCompare, MAX_NAME } from "../../util/util"; -import { assert, assertionError } from "../../../../utils/assert"; -import { ChildrenNode } from "../ChildrenNode"; +import { Index } from './Index'; +import { Node, NamedNode } from '../Node'; +import { nameCompare, MAX_NAME } from '../../util/util'; +import { assert, assertionError } from '../../../../utils/assert'; +import { ChildrenNode } from '../ChildrenNode'; let __EMPTY_NODE: ChildrenNode; @@ -47,7 +47,6 @@ export class KeyIndex extends Index { throw assertionError('KeyIndex.isDefinedOn not expected to be called.'); } - /** * @inheritDoc */ @@ -55,7 +54,6 @@ export class KeyIndex extends Index { return false; // The key for a node never changes. 
} - /** * @inheritDoc */ @@ -63,7 +61,6 @@ export class KeyIndex extends Index { return (NamedNode as any).MIN; } - /** * @inheritDoc */ @@ -73,19 +70,20 @@ export class KeyIndex extends Index { return new NamedNode(MAX_NAME, __EMPTY_NODE); } - /** * @param {*} indexValue * @param {string} name * @return {!NamedNode} */ makePost(indexValue: string, name: string): NamedNode { - assert(typeof indexValue === 'string', 'KeyIndex indexValue must always be a string.'); + assert( + typeof indexValue === 'string', + 'KeyIndex indexValue must always be a string.' + ); // We just use empty node, but it'll never be compared, since our comparator only looks at name. return new NamedNode(indexValue, __EMPTY_NODE); } - /** * @return {!string} String representation for inclusion in a query spec */ @@ -94,4 +92,4 @@ export class KeyIndex extends Index { } } -export const KEY_INDEX = new KeyIndex(); \ No newline at end of file +export const KEY_INDEX = new KeyIndex(); diff --git a/src/database/core/snap/indexes/PathIndex.ts b/src/database/core/snap/indexes/PathIndex.ts index a2081f74881..515a9a2ef4f 100644 --- a/src/database/core/snap/indexes/PathIndex.ts +++ b/src/database/core/snap/indexes/PathIndex.ts @@ -14,12 +14,12 @@ * limitations under the License. 
*/ -import { assert } from "../../../../utils/assert"; -import { nameCompare, MAX_NAME } from "../../util/util"; -import { Index } from "./Index"; -import { ChildrenNode, MAX_NODE } from "../ChildrenNode"; +import { assert } from '../../../../utils/assert'; +import { nameCompare, MAX_NAME } from '../../util/util'; +import { Index } from './Index'; +import { ChildrenNode, MAX_NODE } from '../ChildrenNode'; import { NamedNode, Node } from '../Node'; -import { nodeFromJSON } from "../nodeFromJSON"; +import { nodeFromJSON } from '../nodeFromJSON'; import { Path } from '../../util/Path'; /** @@ -31,8 +31,10 @@ export class PathIndex extends Index { constructor(private indexPath_: Path) { super(); - assert(!indexPath_.isEmpty() && indexPath_.getFront() !== '.priority', - 'Can\'t create PathIndex with empty path or .priority key'); + assert( + !indexPath_.isEmpty() && indexPath_.getFront() !== '.priority', + "Can't create PathIndex with empty path or .priority key" + ); } /** @@ -44,7 +46,6 @@ export class PathIndex extends Index { return snap.getChild(this.indexPath_); } - /** * @inheritDoc */ @@ -52,7 +53,6 @@ export class PathIndex extends Index { return !node.getChild(this.indexPath_).isEmpty(); } - /** * @inheritDoc */ @@ -67,17 +67,18 @@ export class PathIndex extends Index { } } - /** * @inheritDoc */ makePost(indexValue: object, name: string): NamedNode { const valueNode = nodeFromJSON(indexValue); - const node = ChildrenNode.EMPTY_NODE.updateChild(this.indexPath_, valueNode); + const node = ChildrenNode.EMPTY_NODE.updateChild( + this.indexPath_, + valueNode + ); return new NamedNode(name, node); } - /** * @inheritDoc */ @@ -86,11 +87,10 @@ export class PathIndex extends Index { return new NamedNode(MAX_NAME, node); } - /** * @inheritDoc */ toString(): string { return this.indexPath_.slice().join('/'); } -} \ No newline at end of file +} diff --git a/src/database/core/snap/indexes/PriorityIndex.ts b/src/database/core/snap/indexes/PriorityIndex.ts index 
f54e923e7e8..a9ca95a084a 100644 --- a/src/database/core/snap/indexes/PriorityIndex.ts +++ b/src/database/core/snap/indexes/PriorityIndex.ts @@ -15,9 +15,9 @@ */ import { Index } from './Index'; -import { nameCompare, MAX_NAME } from "../../util/util"; +import { nameCompare, MAX_NAME } from '../../util/util'; import { NamedNode, Node } from '../Node'; -import { LeafNode } from "../LeafNode"; +import { LeafNode } from '../LeafNode'; let nodeFromJSON: (a: any) => Node; let MAX_NODE: Node; @@ -30,7 +30,6 @@ export function setMaxNode(val: Node) { MAX_NODE = val; } - /** * @constructor * @extends {Index} @@ -51,7 +50,6 @@ export class PriorityIndex extends Index { } } - /** * @inheritDoc */ @@ -59,7 +57,6 @@ export class PriorityIndex extends Index { return !node.getPriority().isEmpty(); } - /** * @inheritDoc */ @@ -67,7 +64,6 @@ export class PriorityIndex extends Index { return !oldNode.getPriority().equals(newNode.getPriority()); } - /** * @inheritDoc */ @@ -75,7 +71,6 @@ export class PriorityIndex extends Index { return (NamedNode as any).MIN; } - /** * @inheritDoc */ @@ -83,7 +78,6 @@ export class PriorityIndex extends Index { return new NamedNode(MAX_NAME, new LeafNode('[PRIORITY-POST]', MAX_NODE)); } - /** * @param {*} indexValue * @param {string} name @@ -94,7 +88,6 @@ export class PriorityIndex extends Index { return new NamedNode(name, new LeafNode('[PRIORITY-POST]', priorityNode)); } - /** * @return {!string} String representation for inclusion in a query spec */ diff --git a/src/database/core/snap/indexes/ValueIndex.ts b/src/database/core/snap/indexes/ValueIndex.ts index dbd705a1e41..707c381e638 100644 --- a/src/database/core/snap/indexes/ValueIndex.ts +++ b/src/database/core/snap/indexes/ValueIndex.ts @@ -14,10 +14,10 @@ * limitations under the License. 
*/ -import { Index } from "./Index"; +import { Index } from './Index'; import { NamedNode, Node } from '../Node'; -import { nameCompare } from "../../util/util"; -import { nodeFromJSON } from "../nodeFromJSON"; +import { nameCompare } from '../../util/util'; +import { nodeFromJSON } from '../nodeFromJSON'; /** * @constructor @@ -80,7 +80,7 @@ export class ValueIndex extends Index { */ toString(): string { return '.value'; - }; + } } -export const VALUE_INDEX = new ValueIndex(); \ No newline at end of file +export const VALUE_INDEX = new ValueIndex(); diff --git a/src/database/core/snap/nodeFromJSON.ts b/src/database/core/snap/nodeFromJSON.ts index eb09e0b3d55..0d6a47001fa 100644 --- a/src/database/core/snap/nodeFromJSON.ts +++ b/src/database/core/snap/nodeFromJSON.ts @@ -34,8 +34,10 @@ const USE_HINZE = true; * passed JSON contains a .priority property. * @return {!Node} */ -export function nodeFromJSON(json: any | null, - priority: string | number | null = null): Node { +export function nodeFromJSON( + json: any | null, + priority: string | number | null = null +): Node { if (json === null) { return ChildrenNode.EMPTY_NODE; } @@ -46,10 +48,10 @@ export function nodeFromJSON(json: any | null, assert( priority === null || - typeof priority === 'string' || - typeof priority === 'number' || - (typeof priority === 'object' && '.sv' in (priority as object)), - 'Invalid priority type found: ' + (typeof priority) + typeof priority === 'string' || + typeof priority === 'number' || + (typeof priority === 'object' && '.sv' in (priority as object)), + 'Invalid priority type found: ' + typeof priority ); if (typeof json === 'object' && '.value' in json && json['.value'] !== null) { @@ -67,10 +69,12 @@ export function nodeFromJSON(json: any | null, let childrenHavePriority = false; const hinzeJsonObj: { [k: string]: any } = json as object; forEach(hinzeJsonObj, (key: string, child: any) => { - if (typeof key !== 'string' || key.substring(0, 1) !== '.') { // Ignore metadata 
nodes + if (typeof key !== 'string' || key.substring(0, 1) !== '.') { + // Ignore metadata nodes const childNode = nodeFromJSON(hinzeJsonObj[key]); if (!childNode.isEmpty()) { - childrenHavePriority = childrenHavePriority || !childNode.getPriority().isEmpty(); + childrenHavePriority = + childrenHavePriority || !childNode.getPriority().isEmpty(); children.push(new NamedNode(key, childNode)); } } @@ -80,22 +84,39 @@ export function nodeFromJSON(json: any | null, return ChildrenNode.EMPTY_NODE; } - const childSet = buildChildSet(children, NAME_ONLY_COMPARATOR, - (namedNode) => namedNode.name, NAME_COMPARATOR) as SortedMap; + const childSet = buildChildSet( + children, + NAME_ONLY_COMPARATOR, + namedNode => namedNode.name, + NAME_COMPARATOR + ) as SortedMap; if (childrenHavePriority) { - const sortedChildSet = buildChildSet(children, PRIORITY_INDEX.getCompare()); - return new ChildrenNode(childSet, nodeFromJSON(priority), - new IndexMap({'.priority': sortedChildSet}, {'.priority': PRIORITY_INDEX})); + const sortedChildSet = buildChildSet( + children, + PRIORITY_INDEX.getCompare() + ); + return new ChildrenNode( + childSet, + nodeFromJSON(priority), + new IndexMap( + { '.priority': sortedChildSet }, + { '.priority': PRIORITY_INDEX } + ) + ); } else { - return new ChildrenNode(childSet, nodeFromJSON(priority), - IndexMap.Default); + return new ChildrenNode( + childSet, + nodeFromJSON(priority), + IndexMap.Default + ); } } else { let node: Node = ChildrenNode.EMPTY_NODE; const jsonObj = json as object; forEach(jsonObj, (key: string, childData: any) => { if (contains(jsonObj, key)) { - if (key.substring(0, 1) !== '.') { // ignore metadata nodes. + if (key.substring(0, 1) !== '.') { + // ignore metadata nodes. 
const childNode = nodeFromJSON(childData); if (childNode.isLeafNode() || !childNode.isEmpty()) node = node.updateImmediateChild(key, childNode); @@ -107,4 +128,4 @@ export function nodeFromJSON(json: any | null, } } -setNodeFromJSON(nodeFromJSON); \ No newline at end of file +setNodeFromJSON(nodeFromJSON); diff --git a/src/database/core/snap/snap.ts b/src/database/core/snap/snap.ts index db4a1cefe2d..6e9a56e3f31 100644 --- a/src/database/core/snap/snap.ts +++ b/src/database/core/snap/snap.ts @@ -15,10 +15,8 @@ */ import { assert } from '../../../utils/assert'; -import { - doubleToIEEE754String, -} from "../util/util"; -import { contains } from "../../../utils/obj"; +import { doubleToIEEE754String } from '../util/util'; +import { contains } from '../../../utils/obj'; import { Node } from './Node'; let MAX_NODE: Node; @@ -34,8 +32,7 @@ export function setMaxNode(val: Node) { export const priorityHashText = function(priority: string | number): string { if (typeof priority === 'number') return 'number:' + doubleToIEEE754String(priority); - else - return 'string:' + priority; + else return 'string:' + priority; }; /** @@ -46,14 +43,21 @@ export const priorityHashText = function(priority: string | number): string { export const validatePriorityNode = function(priorityNode: Node) { if (priorityNode.isLeafNode()) { const val = priorityNode.val(); - assert(typeof val === 'string' || typeof val === 'number' || - (typeof val === 'object' && contains(val, '.sv')), - 'Priority must be a string or number.'); + assert( + typeof val === 'string' || + typeof val === 'number' || + (typeof val === 'object' && contains(val, '.sv')), + 'Priority must be a string or number.' + ); } else { - assert(priorityNode === MAX_NODE || priorityNode.isEmpty(), - 'priority of unexpected type.'); + assert( + priorityNode === MAX_NODE || priorityNode.isEmpty(), + 'priority of unexpected type.' + ); } // Don't call getPriority() on MAX_NODE to avoid hitting assertion. 
- assert(priorityNode === MAX_NODE || priorityNode.getPriority().isEmpty(), - "Priority nodes can't have a priority of their own."); + assert( + priorityNode === MAX_NODE || priorityNode.getPriority().isEmpty(), + "Priority nodes can't have a priority of their own." + ); }; diff --git a/src/database/core/stats/StatsCollection.ts b/src/database/core/stats/StatsCollection.ts index 7372a8ce850..097105ae633 100644 --- a/src/database/core/stats/StatsCollection.ts +++ b/src/database/core/stats/StatsCollection.ts @@ -26,8 +26,7 @@ export class StatsCollection { private counters_: { [k: string]: number } = {}; incrementCounter(name: string, amount: number = 1) { - if (!contains(this.counters_, name)) - this.counters_[name] = 0; + if (!contains(this.counters_, name)) this.counters_[name] = 0; this.counters_[name] += amount; } @@ -36,4 +35,3 @@ export class StatsCollection { return deepCopy(this.counters_); } } - diff --git a/src/database/core/stats/StatsListener.ts b/src/database/core/stats/StatsListener.ts index 9513112b1c8..a52e372891f 100644 --- a/src/database/core/stats/StatsListener.ts +++ b/src/database/core/stats/StatsListener.ts @@ -24,12 +24,11 @@ import { StatsCollection } from './StatsCollection'; * @constructor */ export class StatsListener { - private last_: {[k: string]: number} | null = null; + private last_: { [k: string]: number } | null = null; - constructor(private collection_: StatsCollection) { - } + constructor(private collection_: StatsCollection) {} - get(): {[k: string]: number} { + get(): { [k: string]: number } { const newStats = this.collection_.get(); const delta: typeof newStats = clone(newStats); @@ -43,4 +42,3 @@ export class StatsListener { return delta; } } - diff --git a/src/database/core/stats/StatsManager.ts b/src/database/core/stats/StatsManager.ts index 23129e7224d..791c7688d23 100644 --- a/src/database/core/stats/StatsManager.ts +++ b/src/database/core/stats/StatsManager.ts @@ -31,7 +31,10 @@ export class StatsManager { return 
this.collections_[hashString]; } - static getOrCreateReporter(repoInfo: RepoInfo, creatorFunction: () => T): T { + static getOrCreateReporter( + repoInfo: RepoInfo, + creatorFunction: () => T + ): T { const hashString = repoInfo.toString(); if (!this.reporters_[hashString]) { diff --git a/src/database/core/stats/StatsReporter.ts b/src/database/core/stats/StatsReporter.ts index 7b2ec7dce53..ad137a1dde4 100644 --- a/src/database/core/stats/StatsReporter.ts +++ b/src/database/core/stats/StatsReporter.ts @@ -43,7 +43,9 @@ export class StatsReporter { constructor(collection: StatsCollection, private server_: ServerActions) { this.statsListener_ = new StatsListener(collection); - const timeout = FIRST_STATS_MIN_TIME + (FIRST_STATS_MAX_TIME - FIRST_STATS_MIN_TIME) * Math.random(); + const timeout = + FIRST_STATS_MIN_TIME + + (FIRST_STATS_MAX_TIME - FIRST_STATS_MIN_TIME) * Math.random(); setTimeoutNonBlocking(this.reportStats_.bind(this), Math.floor(timeout)); } @@ -68,6 +70,9 @@ export class StatsReporter { } // queue our next run. - setTimeoutNonBlocking(this.reportStats_.bind(this), Math.floor(Math.random() * 2 * REPORT_STATS_INTERVAL)); + setTimeoutNonBlocking( + this.reportStats_.bind(this), + Math.floor(Math.random() * 2 * REPORT_STATS_INTERVAL) + ); } } diff --git a/src/database/core/storage/DOMStorageWrapper.ts b/src/database/core/storage/DOMStorageWrapper.ts index 567a279dc18..c12042239df 100644 --- a/src/database/core/storage/DOMStorageWrapper.ts +++ b/src/database/core/storage/DOMStorageWrapper.ts @@ -33,8 +33,7 @@ export class DOMStorageWrapper { /** * @param {Storage} domStorage_ The underlying storage object (e.g. 
localStorage or sessionStorage) */ - constructor(private domStorage_: Storage) { - } + constructor(private domStorage_: Storage) {} /** * @param {string} key The key to save the value under diff --git a/src/database/core/storage/storage.ts b/src/database/core/storage/storage.ts index 600c7dc4461..aeb764ea9ac 100644 --- a/src/database/core/storage/storage.ts +++ b/src/database/core/storage/storage.ts @@ -28,29 +28,31 @@ declare const window: any; * (e.g. 'localStorage' or 'sessionStorage'). * @return {?} Turning off type information until a common interface is defined. */ -const createStoragefor = function(domStorageName: string): DOMStorageWrapper | MemoryStorage { +const createStoragefor = function( + domStorageName: string +): DOMStorageWrapper | MemoryStorage { try { // NOTE: just accessing "localStorage" or "window['localStorage']" may throw a security exception, // so it must be inside the try/catch. - if (typeof window !== 'undefined' && typeof window[domStorageName] !== 'undefined') { + if ( + typeof window !== 'undefined' && + typeof window[domStorageName] !== 'undefined' + ) { // Need to test cache. Just because it's here doesn't mean it works const domStorage = window[domStorageName]; domStorage.setItem('firebase:sentinel', 'cache'); domStorage.removeItem('firebase:sentinel'); return new DOMStorageWrapper(domStorage); } - } catch (e) { - } - + } catch (e) {} + // Failed to create wrapper. Just return in-memory storage. // TODO: log? 
return new MemoryStorage(); }; - /** A storage object that lasts across sessions */ export const PersistentStorage = createStoragefor('localStorage'); - /** A storage object that only lasts one session */ export const SessionStorage = createStoragefor('sessionStorage'); diff --git a/src/database/core/util/CountedSet.ts b/src/database/core/util/CountedSet.ts index a9351d7d75e..04f3709913b 100644 --- a/src/database/core/util/CountedSet.ts +++ b/src/database/core/util/CountedSet.ts @@ -29,7 +29,7 @@ export class CountedSet { * @param {V} val */ add(item: K, val: V) { - this.set[item as any] = val !== null ? val : (true as any); + this.set[item as any] = val !== null ? val : true as any; } /** diff --git a/src/database/core/util/EventEmitter.ts b/src/database/core/util/EventEmitter.ts index aa998355e9d..1e6a8d6e526 100644 --- a/src/database/core/util/EventEmitter.ts +++ b/src/database/core/util/EventEmitter.ts @@ -21,14 +21,21 @@ import { assert } from '../../../utils/assert'; * the set of allowed event names. */ export abstract class EventEmitter { - private listeners_: { [eventType: string]: Array<{ callback(...args: any[]): void, context: any }> } = {}; + private listeners_: { + [eventType: string]: Array<{ + callback(...args: any[]): void; + context: any; + }>; + } = {}; /** * @param {!Array.} allowedEvents_ */ constructor(private allowedEvents_: Array) { - assert(Array.isArray(allowedEvents_) && allowedEvents_.length > 0, - 'Requires a non-empty array'); + assert( + Array.isArray(allowedEvents_) && allowedEvents_.length > 0, + 'Requires a non-empty array' + ); } /** @@ -48,9 +55,7 @@ export abstract class EventEmitter { protected trigger(eventType: string, ...var_args: any[]) { if (Array.isArray(this.listeners_[eventType])) { // Clone the list, since callbacks could add/remove listeners. 
- const listeners = [ - ...this.listeners_[eventType] - ]; + const listeners = [...this.listeners_[eventType]]; for (let i = 0; i < listeners.length; i++) { listeners[i].callback.apply(listeners[i].context, var_args); @@ -61,7 +66,7 @@ export abstract class EventEmitter { on(eventType: string, callback: (a: any) => void, context: any) { this.validateEventType_(eventType); this.listeners_[eventType] = this.listeners_[eventType] || []; - this.listeners_[eventType].push({callback, context}); + this.listeners_[eventType].push({ callback, context }); const eventData = this.getInitialEvent(eventType); if (eventData) { @@ -73,7 +78,10 @@ export abstract class EventEmitter { this.validateEventType_(eventType); const listeners = this.listeners_[eventType] || []; for (let i = 0; i < listeners.length; i++) { - if (listeners[i].callback === callback && (!context || context === listeners[i].context)) { + if ( + listeners[i].callback === callback && + (!context || context === listeners[i].context) + ) { listeners.splice(i, 1); return; } @@ -81,11 +89,11 @@ export abstract class EventEmitter { } private validateEventType_(eventType: string) { - assert(this.allowedEvents_.find(function (et) { + assert( + this.allowedEvents_.find(function(et) { return et === eventType; }), 'Unknown event: ' + eventType ); } } - diff --git a/src/database/core/util/ImmutableTree.ts b/src/database/core/util/ImmutableTree.ts index ed44c3c7ebb..b89c77c7c05 100644 --- a/src/database/core/util/ImmutableTree.ts +++ b/src/database/core/util/ImmutableTree.ts @@ -29,7 +29,9 @@ let emptyChildrenSingleton: SortedMap>; */ const EmptyChildren = (): SortedMap> => { if (!emptyChildrenSingleton) { - emptyChildrenSingleton = new SortedMap>(stringCompare); + emptyChildrenSingleton = new SortedMap>( + stringCompare + ); } return emptyChildrenSingleton; }; @@ -58,9 +60,13 @@ export class ImmutableTree { * @param {?T} value * @param {SortedMap.>=} children */ - constructor(public readonly value: T | null, - public 
readonly children: SortedMap> = EmptyChildren()) { - } + constructor( + public readonly value: T | null, + public readonly children: SortedMap< + string, + ImmutableTree + > = EmptyChildren() + ) {} /** * True if the value is empty and there are no children @@ -82,10 +88,12 @@ export class ImmutableTree { * node * @return {?{path:!Path, value:!T}} */ - findRootMostMatchingPathAndValue(relativePath: Path, - predicate: (a: T) => boolean): { path: Path, value: T } | null { + findRootMostMatchingPathAndValue( + relativePath: Path, + predicate: (a: T) => boolean + ): { path: Path; value: T } | null { if (this.value != null && predicate(this.value)) { - return {path: Path.Empty, value: this.value}; + return { path: Path.Empty, value: this.value }; } else { if (relativePath.isEmpty()) { return null; @@ -93,12 +101,15 @@ export class ImmutableTree { const front = relativePath.getFront(); const child = this.children.get(front); if (child !== null) { - const childExistingPathAndValue = - child.findRootMostMatchingPathAndValue(relativePath.popFront(), - predicate); + const childExistingPathAndValue = child.findRootMostMatchingPathAndValue( + relativePath.popFront(), + predicate + ); if (childExistingPathAndValue != null) { - const fullPath = new Path(front).child(childExistingPathAndValue.path); - return {path: fullPath, value: childExistingPathAndValue.value}; + const fullPath = new Path(front).child( + childExistingPathAndValue.path + ); + return { path: fullPath, value: childExistingPathAndValue.value }; } else { return null; } @@ -115,7 +126,9 @@ export class ImmutableTree { * @param {!Path} relativePath * @return {?{path: !Path, value: !T}} */ - findRootMostValueAndPath(relativePath: Path): { path: Path, value: T } | null { + findRootMostValueAndPath( + relativePath: Path + ): { path: Path; value: T } | null { return this.findRootMostMatchingPathAndValue(relativePath, () => true); } @@ -255,9 +268,15 @@ export class ImmutableTree { * @return {V} * @private */ - private 
fold_(pathSoFar: Path, fn: (path: Path, value: T | null, children: { [k: string]: V }) => V): V { - const accum: {[k: string]: V} = {}; - this.children.inorderTraversal(function (childKey: string, childTree: ImmutableTree) { + private fold_( + pathSoFar: Path, + fn: (path: Path, value: T | null, children: { [k: string]: V }) => V + ): V { + const accum: { [k: string]: V } = {}; + this.children.inorderTraversal(function( + childKey: string, + childTree: ImmutableTree + ) { accum[childKey] = childTree.fold_(pathSoFar.child(childKey), fn); }); return fn(pathSoFar, this.value, accum); @@ -270,11 +289,15 @@ export class ImmutableTree { * @param {!function(!Path, !T):?V} f * @return {?V} */ - findOnPath(path: Path, f: (path: Path, value: T) => (V | null)): V | null { + findOnPath(path: Path, f: (path: Path, value: T) => V | null): V | null { return this.findOnPath_(path, Path.Empty, f); } - private findOnPath_(pathToFollow: Path, pathSoFar: Path, f: (path: Path, value: T) => (V | null)): V | null { + private findOnPath_( + pathToFollow: Path, + pathSoFar: Path, + f: (path: Path, value: T) => V | null + ): V | null { const result = this.value ? 
f(pathSoFar, this.value) : false; if (result) { return result; @@ -285,7 +308,11 @@ export class ImmutableTree { const front = pathToFollow.getFront()!; const nextChild = this.children.get(front); if (nextChild) { - return nextChild.findOnPath_(pathToFollow.popFront(), pathSoFar.child(front), f); + return nextChild.findOnPath_( + pathToFollow.popFront(), + pathSoFar.child(front), + f + ); } else { return null; } @@ -299,12 +326,18 @@ export class ImmutableTree { * @param {!function(!Path, !T)} f * @returns {!ImmutableTree.} */ - foreachOnPath(path: Path, f: (path: Path, value: T) => void): ImmutableTree { + foreachOnPath( + path: Path, + f: (path: Path, value: T) => void + ): ImmutableTree { return this.foreachOnPath_(path, Path.Empty, f); } - private foreachOnPath_(pathToFollow: Path, currentRelativePath: Path, - f: (path: Path, value: T) => void): ImmutableTree { + private foreachOnPath_( + pathToFollow: Path, + currentRelativePath: Path, + f: (path: Path, value: T) => void + ): ImmutableTree { if (pathToFollow.isEmpty()) { return this; } else { @@ -314,8 +347,11 @@ export class ImmutableTree { const front = pathToFollow.getFront(); const nextChild = this.children.get(front); if (nextChild) { - return nextChild.foreachOnPath_(pathToFollow.popFront(), - currentRelativePath.child(front), f); + return nextChild.foreachOnPath_( + pathToFollow.popFront(), + currentRelativePath.child(front), + f + ); } else { return ImmutableTree.Empty; } @@ -333,8 +369,11 @@ export class ImmutableTree { this.foreach_(Path.Empty, f); } - private foreach_(currentRelativePath: Path, f: (path: Path, value: T) => void) { - this.children.inorderTraversal(function (childName, childTree) { + private foreach_( + currentRelativePath: Path, + f: (path: Path, value: T) => void + ) { + this.children.inorderTraversal(function(childName, childTree) { childTree.foreach_(currentRelativePath.child(childName), f); }); if (this.value) { @@ -347,10 +386,12 @@ export class ImmutableTree { * @param 
{function(string, !T)} f */ foreachChild(f: (name: string, value: T) => void) { - this.children.inorderTraversal((childName: string, childTree: ImmutableTree) => { - if (childTree.value) { - f(childName, childTree.value); + this.children.inorderTraversal( + (childName: string, childTree: ImmutableTree) => { + if (childTree.value) { + f(childName, childTree.value); + } } - }); + ); } } diff --git a/src/database/core/util/NextPushId.ts b/src/database/core/util/NextPushId.ts index 014f799591e..747a4fe782c 100644 --- a/src/database/core/util/NextPushId.ts +++ b/src/database/core/util/NextPushId.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { assert } from "../../../utils/assert"; +import { assert } from '../../../utils/assert'; /** * Fancy ID generator that creates 20-character string identifiers with the @@ -32,7 +32,8 @@ import { assert } from "../../../utils/assert"; */ export const nextPushId = (function() { // Modeled after base64 web-safe chars, but ordered by ASCII. - const PUSH_CHARS = '-0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'; + const PUSH_CHARS = + '-0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'; // Timestamp of last push, used to prevent local collisions if you push twice // in one ms. @@ -45,7 +46,7 @@ export const nextPushId = (function() { const lastRandChars: number[] = []; return function(now: number) { - const duplicateTime = (now === lastPushTime); + const duplicateTime = now === lastPushTime; lastPushTime = now; let i; diff --git a/src/database/core/util/OnlineMonitor.ts b/src/database/core/util/OnlineMonitor.ts index 838223f7184..d8a5438a3df 100644 --- a/src/database/core/util/OnlineMonitor.ts +++ b/src/database/core/util/OnlineMonitor.ts @@ -14,9 +14,9 @@ * limitations under the License. 
*/ -import { assert } from "../../../utils/assert"; -import { EventEmitter } from "./EventEmitter"; -import { isMobileCordova } from "../../../utils/environment"; +import { assert } from '../../../utils/assert'; +import { EventEmitter } from './EventEmitter'; +import { isMobileCordova } from '../../../utils/environment'; /** * Monitors online state (as reported by window.online/offline events). @@ -41,22 +41,32 @@ export class OnlineMonitor extends EventEmitter { // https://forum.ionicframework.com/t/firebase-connection-is-lost-and-never-come-back/43810 // It would seem that the 'online' event does not always fire consistently. So we disable it // for Cordova. - if (typeof window !== 'undefined' && - typeof window.addEventListener !== 'undefined' && - !isMobileCordova()) { - window.addEventListener('online', () => { - if (!this.online_) { - this.online_ = true; - this.trigger('online', true); - } - }, false); + if ( + typeof window !== 'undefined' && + typeof window.addEventListener !== 'undefined' && + !isMobileCordova() + ) { + window.addEventListener( + 'online', + () => { + if (!this.online_) { + this.online_ = true; + this.trigger('online', true); + } + }, + false + ); - window.addEventListener('offline', () => { - if (this.online_) { - this.online_ = false; - this.trigger('online', false); - } - }, false); + window.addEventListener( + 'offline', + () => { + if (this.online_) { + this.online_ = false; + this.trigger('online', false); + } + }, + false + ); } } diff --git a/src/database/core/util/Path.ts b/src/database/core/util/Path.ts index fc312ee057f..3616802bbef 100644 --- a/src/database/core/util/Path.ts +++ b/src/database/core/util/Path.ts @@ -62,8 +62,7 @@ export class Path { } getFront(): string | null { - if (this.pieceNum_ >= this.pieces_.length) - return null; + if (this.pieceNum_ >= this.pieces_.length) return null; return this.pieces_[this.pieceNum_]; } @@ -99,8 +98,7 @@ export class Path { toString(): string { let pathString = ''; for (let i = 
this.pieceNum_; i < this.pieces_.length; i++) { - if (this.pieces_[i] !== '') - pathString += '/' + this.pieces_[i]; + if (this.pieces_[i] !== '') pathString += '/' + this.pieces_[i]; } return pathString || '/'; @@ -130,8 +128,7 @@ export class Path { * @return {?Path} */ parent(): Path | null { - if (this.pieceNum_ >= this.pieces_.length) - return null; + if (this.pieceNum_ >= this.pieces_.length) return null; const pieces = []; for (let i = this.pieceNum_; i < this.pieces_.length - 1; i++) @@ -150,14 +147,17 @@ export class Path { pieces.push(this.pieces_[i]); if (childPathObj instanceof Path) { - for (let i = childPathObj.pieceNum_; i < childPathObj.pieces_.length; i++) { + for ( + let i = childPathObj.pieceNum_; + i < childPathObj.pieces_.length; + i++ + ) { pieces.push(childPathObj.pieces_[i]); } } else { const childPieces = childPathObj.split('/'); for (let i = 0; i < childPieces.length; i++) { - if (childPieces[i].length > 0) - pieces.push(childPieces[i]); + if (childPieces[i].length > 0) pieces.push(childPieces[i]); } } @@ -177,15 +177,21 @@ export class Path { * @return {!Path} The path from outerPath to innerPath */ static relativePath(outerPath: Path, innerPath: Path): Path { - const outer = outerPath.getFront(), inner = innerPath.getFront(); + const outer = outerPath.getFront(), + inner = innerPath.getFront(); if (outer === null) { return innerPath; } else if (outer === inner) { - return Path.relativePath(outerPath.popFront(), - innerPath.popFront()); + return Path.relativePath(outerPath.popFront(), innerPath.popFront()); } else { - throw new Error('INTERNAL ERROR: innerPath (' + innerPath + ') is not within ' + - 'outerPath (' + outerPath + ')'); + throw new Error( + 'INTERNAL ERROR: innerPath (' + + innerPath + + ') is not within ' + + 'outerPath (' + + outerPath + + ')' + ); } } @@ -202,7 +208,7 @@ export class Path { if (cmp !== 0) return cmp; } if (leftKeys.length === rightKeys.length) return 0; - return (leftKeys.length < rightKeys.length) ? 
-1 : 1; + return leftKeys.length < rightKeys.length ? -1 : 1; } /** @@ -215,7 +221,11 @@ export class Path { return false; } - for (let i = this.pieceNum_, j = other.pieceNum_; i <= this.pieces_.length; i++, j++) { + for ( + let i = this.pieceNum_, j = other.pieceNum_; + i <= this.pieces_.length; + i++, j++ + ) { if (this.pieces_[i] !== other.pieces_[j]) { return false; } @@ -285,7 +295,7 @@ export class ValidationPath { /** @const {number} Maximum number of (UTF8) bytes in a Firebase path. */ static get MAX_PATH_LENGTH_BYTES() { - return 768 + return 768; } /** @param {string} child */ @@ -310,14 +320,23 @@ export class ValidationPath { private checkValid_() { if (this.byteLength_ > ValidationPath.MAX_PATH_LENGTH_BYTES) { - throw new Error(this.errorPrefix_ + 'has a key path longer than ' + - ValidationPath.MAX_PATH_LENGTH_BYTES + ' bytes (' + - this.byteLength_ + ').'); + throw new Error( + this.errorPrefix_ + + 'has a key path longer than ' + + ValidationPath.MAX_PATH_LENGTH_BYTES + + ' bytes (' + + this.byteLength_ + + ').' 
+ ); } if (this.parts_.length > ValidationPath.MAX_PATH_DEPTH) { - throw new Error(this.errorPrefix_ + 'path specified exceeds the maximum depth that can be written (' + - ValidationPath.MAX_PATH_DEPTH + - ') or object contains a cycle ' + this.toErrorString()); + throw new Error( + this.errorPrefix_ + + 'path specified exceeds the maximum depth that can be written (' + + ValidationPath.MAX_PATH_DEPTH + + ') or object contains a cycle ' + + this.toErrorString() + ); } } @@ -330,8 +349,6 @@ export class ValidationPath { if (this.parts_.length == 0) { return ''; } - return 'in property \'' + this.parts_.join('.') + '\''; + return "in property '" + this.parts_.join('.') + "'"; } - } - diff --git a/src/database/core/util/ServerValues.ts b/src/database/core/util/ServerValues.ts index 38276b63172..b8f0129b8f8 100644 --- a/src/database/core/util/ServerValues.ts +++ b/src/database/core/util/ServerValues.ts @@ -28,13 +28,14 @@ import { ChildrenNode } from '../snap/ChildrenNode'; * @param {?Object} values * @return {!Object} */ -export const generateWithValues = function (values: { [k: string]: any } | null): { [k: string]: any } { +export const generateWithValues = function(values: { + [k: string]: any; +} | null): { [k: string]: any } { values = values || {}; values['timestamp'] = values['timestamp'] || new Date().getTime(); return values; }; - /** * Value to use when firing local events. When writing server values, fire * local events with an approximate value, otherwise return value as-is. 
@@ -42,9 +43,11 @@ export const generateWithValues = function (values: { [k: string]: any } | null) * @param {!Object} serverValues * @return {!(string|number|boolean)} */ -export const resolveDeferredValue = function (value: { [k: string]: any } | string | number | boolean, - serverValues: { [k: string]: any }): string | number | boolean { - if (!value || (typeof value !== 'object')) { +export const resolveDeferredValue = function( + value: { [k: string]: any } | string | number | boolean, + serverValues: { [k: string]: any } +): string | number | boolean { + if (!value || typeof value !== 'object') { return value as string | number | boolean; } else { assert('.sv' in value, 'Unexpected leaf node or priority contents'); @@ -52,7 +55,6 @@ export const resolveDeferredValue = function (value: { [k: string]: any } | stri } }; - /** * Recursively replace all deferred values and priorities in the tree with the * specified generated replacement values. @@ -60,15 +62,20 @@ export const resolveDeferredValue = function (value: { [k: string]: any } | stri * @param {!Object} serverValues * @return {!SparseSnapshotTree} */ -export const resolveDeferredValueTree = function (tree: SparseSnapshotTree, serverValues: Object): SparseSnapshotTree { +export const resolveDeferredValueTree = function( + tree: SparseSnapshotTree, + serverValues: Object +): SparseSnapshotTree { const resolvedTree = new SparseSnapshotTree(); - tree.forEachTree(new Path(''), function (path, node) { - resolvedTree.remember(path, resolveDeferredValueSnapshot(node, serverValues)); + tree.forEachTree(new Path(''), function(path, node) { + resolvedTree.remember( + path, + resolveDeferredValueSnapshot(node, serverValues) + ); }); return resolvedTree; }; - /** * Recursively replace all deferred values and priorities in the node with the * specified generated replacement values. 
If there are no server values in the node, @@ -77,15 +84,26 @@ export const resolveDeferredValueTree = function (tree: SparseSnapshotTree, serv * @param {!Object} serverValues * @return {!Node} */ -export const resolveDeferredValueSnapshot = function (node: Node, serverValues: Object): Node { - const rawPri = node.getPriority().val() as object | boolean | null | number | string; +export const resolveDeferredValueSnapshot = function( + node: Node, + serverValues: Object +): Node { + const rawPri = node.getPriority().val() as + | object + | boolean + | null + | number + | string; const priority = resolveDeferredValue(rawPri, serverValues); let newNode: Node; if (node.isLeafNode()) { const leafNode = node as LeafNode; const value = resolveDeferredValue(leafNode.getValue(), serverValues); - if (value !== leafNode.getValue() || priority !== leafNode.getPriority().val()) { + if ( + value !== leafNode.getValue() || + priority !== leafNode.getPriority().val() + ) { return new LeafNode(value, nodeFromJSON(priority)); } else { return node; @@ -96,8 +114,11 @@ export const resolveDeferredValueSnapshot = function (node: Node, serverValues: if (priority !== childrenNode.getPriority().val()) { newNode = newNode.updatePriority(new LeafNode(priority)); } - childrenNode.forEachChild(PRIORITY_INDEX, function (childName, childNode) { - const newChildNode = resolveDeferredValueSnapshot(childNode, serverValues); + childrenNode.forEachChild(PRIORITY_INDEX, function(childName, childNode) { + const newChildNode = resolveDeferredValueSnapshot( + childNode, + serverValues + ); if (newChildNode !== childNode) { newNode = newNode.updateImmediateChild(childName, newChildNode); } diff --git a/src/database/core/util/SortedMap.ts b/src/database/core/util/SortedMap.ts index f8c97cd8b42..44d6f942a6d 100644 --- a/src/database/core/util/SortedMap.ts +++ b/src/database/core/util/SortedMap.ts @@ -28,7 +28,6 @@ * Invariant 3: Only the left child can be red (left leaning) */ - // TODO: There are some 
improvements I'd like to make to improve memory / perf: // * Create two prototypes, LLRedNode and LLBlackNode, instead of storing a // color property in every node. @@ -54,12 +53,13 @@ export class SortedMapIterator { * @param {boolean} isReverse_ Whether or not to iterate in reverse * @param {(function(K, V):T)=} resultGenerator_ */ - constructor(node: LLRBNode | LLRBEmptyNode, - startKey: K | null, - comparator: Comparator, - private isReverse_: boolean, - private resultGenerator_: ((k: K, v: V) => T) | null = null) { - + constructor( + node: LLRBNode | LLRBEmptyNode, + startKey: K | null, + comparator: Comparator, + private isReverse_: boolean, + private resultGenerator_: ((k: K, v: V) => T) | null = null + ) { let cmp = 1; while (!node.isEmpty()) { node = node as LLRBNode; @@ -91,15 +91,13 @@ export class SortedMapIterator { } getNext(): T { - if (this.nodeStack_.length === 0) - return null; + if (this.nodeStack_.length === 0) return null; let node = this.nodeStack_.pop(); let result: T; if (this.resultGenerator_) result = this.resultGenerator_(node.key, node.value); - else - result = {key: node.key, value: node.value} as any; + else result = { key: node.key, value: node.value } as any; if (this.isReverse_) { node = node.left; @@ -123,19 +121,17 @@ export class SortedMapIterator { } peek(): T { - if (this.nodeStack_.length === 0) - return null; + if (this.nodeStack_.length === 0) return null; const node = this.nodeStack_[this.nodeStack_.length - 1]; if (this.resultGenerator_) { return this.resultGenerator_(node.key, node.value); } else { - return {key: node.key, value: node.value} as any; + return { key: node.key, value: node.value } as any; } } } - /** * Represents a node in a Left-leaning Red-Black tree. */ @@ -152,14 +148,18 @@ export class LLRBNode { * @param {?(LLRBNode|LLRBEmptyNode)=} left Left child. * @param {?(LLRBNode|LLRBEmptyNode)=} right Right child. 
*/ - constructor(public key: K, - public value: V, - color: boolean | null, - left?: LLRBNode | LLRBEmptyNode | null, - right?: LLRBNode | LLRBEmptyNode | null) { + constructor( + public key: K, + public value: V, + color: boolean | null, + left?: LLRBNode | LLRBEmptyNode | null, + right?: LLRBNode | LLRBEmptyNode | null + ) { this.color = color != null ? color : LLRBNode.RED; - this.left = left != null ? left : SortedMap.EMPTY_NODE as LLRBEmptyNode; - this.right = right != null ? right : SortedMap.EMPTY_NODE as LLRBEmptyNode; + this.left = + left != null ? left : SortedMap.EMPTY_NODE as LLRBEmptyNode; + this.right = + right != null ? right : SortedMap.EMPTY_NODE as LLRBEmptyNode; } static RED = true; @@ -175,15 +175,20 @@ export class LLRBNode { * @param {?LLRBNode|LLRBEmptyNode} right New right child for the node, or null. * @return {!LLRBNode} The node copy. */ - copy(key: K | null, value: V | null, color: boolean | null, - left: LLRBNode | LLRBEmptyNode | null, - right: LLRBNode | LLRBEmptyNode | null): LLRBNode { + copy( + key: K | null, + value: V | null, + color: boolean | null, + left: LLRBNode | LLRBEmptyNode | null, + right: LLRBNode | LLRBEmptyNode | null + ): LLRBNode { return new LLRBNode( - (key != null) ? key : this.key, - (value != null) ? value : this.value, - (color != null) ? color : this.color, - (left != null) ? left : this.left, - (right != null) ? right : this.right); + key != null ? key : this.key, + value != null ? value : this.value, + color != null ? color : this.color, + left != null ? left : this.left, + right != null ? 
right : this.right + ); } /** @@ -210,9 +215,11 @@ export class LLRBNode { * value returned by action */ inorderTraversal(action: (k: K, v: V) => any): boolean { - return this.left.inorderTraversal(action) || + return ( + this.left.inorderTraversal(action) || action(this.key, this.value) || - this.right.inorderTraversal(action); + this.right.inorderTraversal(action) + ); } /** @@ -224,20 +231,22 @@ export class LLRBNode { * @return {*} True if traversal was aborted. */ reverseTraversal(action: (k: K, v: V) => void): boolean { - return this.right.reverseTraversal(action) || + return ( + this.right.reverseTraversal(action) || action(this.key, this.value) || - this.left.reverseTraversal(action); + this.left.reverseTraversal(action) + ); } /** * @return {!Object} The minimum node in the tree. * @private */ - private min_(): LLRBNode { + private min_(): LLRBNode { if (this.left.isEmpty()) { return this; } else { - return (this.left as LLRBNode).min_(); + return (this.left as LLRBNode).min_(); } } @@ -266,7 +275,7 @@ export class LLRBNode { * @param {Comparator} comparator Comparator. * @return {!LLRBNode} New tree, with the key/value added. */ - insert(key: K, value: V, comparator: Comparator): LLRBNode { + insert(key: K, value: V, comparator: Comparator): LLRBNode { let cmp, n; n = this; cmp = comparator(key, n.key); @@ -275,7 +284,13 @@ export class LLRBNode { } else if (cmp === 0) { n = n.copy(null, value, null, null, null); } else { - n = n.copy(null, null, null, null, n.right.insert(key, value, comparator)); + n = n.copy( + null, + null, + null, + null, + n.right.insert(key, value, comparator) + ); } return n.fixUp_(); } @@ -284,14 +299,13 @@ export class LLRBNode { * @private * @return {!LLRBNode|LLRBEmptyNode} New tree, with the minimum key removed. 
*/ - private removeMin_(): LLRBNode | LLRBEmptyNode { + private removeMin_(): LLRBNode | LLRBEmptyNode { if (this.left.isEmpty()) { - return SortedMap.EMPTY_NODE as LLRBEmptyNode; + return SortedMap.EMPTY_NODE as LLRBEmptyNode; } - let n: LLRBNode = this; - if (!n.left.isRed_() && !n.left.left.isRed_()) - n = n.moveRedLeft_(); - n = n.copy(null, null, null, (n.left as LLRBNode).removeMin_(), null); + let n: LLRBNode = this; + if (!n.left.isRed_() && !n.left.left.isRed_()) n = n.moveRedLeft_(); + n = n.copy(null, null, null, (n.left as LLRBNode).removeMin_(), null); return n.fixUp_(); } @@ -300,7 +314,10 @@ export class LLRBNode { * @param {Comparator} comparator Comparator. * @return {!LLRBNode|LLRBEmptyNode} New tree, with the specified item removed. */ - remove(key: K, comparator: Comparator): LLRBNode | LLRBEmptyNode { + remove( + key: K, + comparator: Comparator + ): LLRBNode | LLRBEmptyNode { let n, smallest; n = this; if (comparator(key, n.key) < 0) { @@ -315,11 +332,16 @@ export class LLRBNode { } if (comparator(key, n.key) === 0) { if (n.right.isEmpty()) { - return SortedMap.EMPTY_NODE as LLRBEmptyNode; + return SortedMap.EMPTY_NODE as LLRBEmptyNode; } else { - smallest = (n.right as LLRBNode).min_(); - n = n.copy(smallest.key, smallest.value, null, null, - (n.right as LLRBNode).removeMin_()); + smallest = (n.right as LLRBNode).min_(); + n = n.copy( + smallest.key, + smallest.value, + null, + null, + (n.right as LLRBNode).removeMin_() + ); } } n = n.copy(null, null, null, null, n.right.remove(key, comparator)); @@ -339,7 +361,7 @@ export class LLRBNode { * @private * @return {!LLRBNode} New tree after performing any needed rotations. */ - private fixUp_(): LLRBNode { + private fixUp_(): LLRBNode { let n = this as any; if (n.right.isRed_() && !n.left.isRed_()) n = n.rotateLeft_(); if (n.left.isRed_() && n.left.left.isRed_()) n = n.rotateRight_(); @@ -351,10 +373,16 @@ export class LLRBNode { * @private * @return {!LLRBNode} New tree, after moveRedLeft. 
*/ - private moveRedLeft_(): LLRBNode { + private moveRedLeft_(): LLRBNode { let n = this.colorFlip_(); if (n.right.left.isRed_()) { - n = n.copy(null, null, null, null, (n.right as LLRBNode).rotateRight_()); + n = n.copy( + null, + null, + null, + null, + (n.right as LLRBNode).rotateRight_() + ); n = n.rotateLeft_(); n = n.colorFlip_(); } @@ -365,7 +393,7 @@ export class LLRBNode { * @private * @return {!LLRBNode} New tree, after moveRedRight. */ - private moveRedRight_(): LLRBNode { + private moveRedRight_(): LLRBNode { let n = this.colorFlip_(); if (n.left.left.isRed_()) { n = n.rotateRight_(); @@ -378,25 +406,25 @@ export class LLRBNode { * @private * @return {!LLRBNode} New tree, after rotateLeft. */ - private rotateLeft_(): LLRBNode { + private rotateLeft_(): LLRBNode { const nl = this.copy(null, null, LLRBNode.RED, null, this.right.left); - return this.right.copy(null, null, this.color, nl, null) as LLRBNode; + return this.right.copy(null, null, this.color, nl, null) as LLRBNode; } /** * @private * @return {!LLRBNode} New tree, after rotateRight. */ - private rotateRight_(): LLRBNode { + private rotateRight_(): LLRBNode { const nr = this.copy(null, null, LLRBNode.RED, this.left.right, null); - return this.left.copy(null, null, this.color, null, nr) as LLRBNode; + return this.left.copy(null, null, this.color, null, nr) as LLRBNode; } /** * @private * @return {!LLRBNode} New tree, after colorFlip. 
*/ - private colorFlip_(): LLRBNode { + private colorFlip_(): LLRBNode { const left = this.left.copy(null, null, !this.left.color, null, null); const right = this.right.copy(null, null, !this.right.color, null, null); return this.copy(null, null, !this.color, left, right); @@ -410,7 +438,7 @@ export class LLRBNode { */ private checkMaxDepth_(): boolean { const blackDepth = this.check_(); - return (Math.pow(2.0, blackDepth) <= this.count() + 1); + return Math.pow(2.0, blackDepth) <= this.count() + 1; } /** @@ -420,12 +448,14 @@ export class LLRBNode { check_(): number { let blackDepth; if (this.isRed_() && this.left.isRed_()) { - throw new Error('Red node has red child(' + this.key + ',' + - this.value + ')'); + throw new Error( + 'Red node has red child(' + this.key + ',' + this.value + ')' + ); } if (this.right.isRed_()) { - throw new Error('Right child of (' + this.key + ',' + - this.value + ') is red'); + throw new Error( + 'Right child of (' + this.key + ',' + this.value + ') is red' + ); } blackDepth = this.left.check_(); if (blackDepth !== this.right.check_()) { @@ -436,7 +466,6 @@ export class LLRBNode { } } - /** * Represents an empty node (a leaf node in the Red-Black Tree). */ @@ -452,9 +481,13 @@ export class LLRBEmptyNode { * * @return {!LLRBEmptyNode} The node copy. */ - copy(key: K | null, value: V | null, color: boolean | null, - left: LLRBNode | LLRBEmptyNode | null, - right: LLRBNode | LLRBEmptyNode | null): LLRBEmptyNode { + copy( + key: K | null, + value: V | null, + color: boolean | null, + left: LLRBNode | LLRBEmptyNode | null, + right: LLRBNode | LLRBEmptyNode | null + ): LLRBEmptyNode { return this; } @@ -566,9 +599,12 @@ export class SortedMap { * @param {function(K, K):number} comparator_ Key comparator. * @param {LLRBNode=} root_ (Optional) Root node for the map. 
*/ - constructor(private comparator_: Comparator, - private root_: LLRBNode | LLRBEmptyNode = SortedMap.EMPTY_NODE as LLRBEmptyNode) { - } + constructor( + private comparator_: Comparator, + private root_: + | LLRBNode + | LLRBEmptyNode = SortedMap.EMPTY_NODE as LLRBEmptyNode + ) {} /** * Returns a copy of the map, with the specified key/value added or replaced. @@ -581,8 +617,10 @@ export class SortedMap { insert(key: K, value: V): SortedMap { return new SortedMap( this.comparator_, - this.root_.insert(key, value, this.comparator_) - .copy(null, null, LLRBNode.BLACK, null, null)); + this.root_ + .insert(key, value, this.comparator_) + .copy(null, null, LLRBNode.BLACK, null, null) + ); } /** @@ -594,8 +632,10 @@ export class SortedMap { remove(key: K): SortedMap { return new SortedMap( this.comparator_, - this.root_.remove(key, this.comparator_) - .copy(null, null, LLRBNode.BLACK, null, null)); + this.root_ + .remove(key, this.comparator_) + .copy(null, null, LLRBNode.BLACK, null, null) + ); } /** @@ -627,14 +667,15 @@ export class SortedMap { * @return {?K} The predecessor key. */ getPredecessorKey(key: K): K | null { - let cmp, node = this.root_, rightParent = null; + let cmp, + node = this.root_, + rightParent = null; while (!node.isEmpty()) { cmp = this.comparator_(key, node.key); if (cmp === 0) { if (!node.left.isEmpty()) { node = node.left; - while (!node.right.isEmpty()) - node = node.right; + while (!node.right.isEmpty()) node = node.right; return node.key; } else if (rightParent) { return rightParent.key; @@ -649,7 +690,9 @@ export class SortedMap { } } - throw new Error('Attempted to find predecessor key for a nonexistent key. What gives?'); + throw new Error( + 'Attempted to find predecessor key for a nonexistent key. What gives?' + ); } /** @@ -711,35 +754,53 @@ export class SortedMap { * @param {(function(K, V):T)=} resultGenerator * @return {SortedMapIterator.} The iterator. 
*/ - getIterator(resultGenerator?: (k: K, v: V) => T): SortedMapIterator { - return new SortedMapIterator(this.root_, + getIterator( + resultGenerator?: (k: K, v: V) => T + ): SortedMapIterator { + return new SortedMapIterator( + this.root_, null, this.comparator_, false, - resultGenerator); + resultGenerator + ); } - getIteratorFrom(key: K, resultGenerator?: (k: K, v: V) => T): SortedMapIterator { - return new SortedMapIterator(this.root_, + getIteratorFrom( + key: K, + resultGenerator?: (k: K, v: V) => T + ): SortedMapIterator { + return new SortedMapIterator( + this.root_, key, this.comparator_, false, - resultGenerator); + resultGenerator + ); } - getReverseIteratorFrom(key: K, resultGenerator?: (k: K, v: V) => T): SortedMapIterator { - return new SortedMapIterator(this.root_, + getReverseIteratorFrom( + key: K, + resultGenerator?: (k: K, v: V) => T + ): SortedMapIterator { + return new SortedMapIterator( + this.root_, key, this.comparator_, true, - resultGenerator); + resultGenerator + ); } - getReverseIterator(resultGenerator?: (k: K, v: V) => T): SortedMapIterator { - return new SortedMapIterator(this.root_, + getReverseIterator( + resultGenerator?: (k: K, v: V) => T + ): SortedMapIterator { + return new SortedMapIterator( + this.root_, null, this.comparator_, true, - resultGenerator); + resultGenerator + ); } -} \ No newline at end of file +} diff --git a/src/database/core/util/Tree.ts b/src/database/core/util/Tree.ts index 472b5614a3a..e6712c5cddf 100644 --- a/src/database/core/util/Tree.ts +++ b/src/database/core/util/Tree.ts @@ -16,7 +16,7 @@ import { assert } from '../../../utils/assert'; import { Path } from './Path'; -import { forEach, contains, safeGet } from '../../../utils/obj' +import { forEach, contains, safeGet } from '../../../utils/obj'; /** * Node in a Tree. @@ -29,7 +29,6 @@ export class TreeNode { value: T | null = null; } - /** * A light-weight tree, traversable by path. Nodes can have both values and children. 
* Nodes are not enumerated (by forEachChild) unless they have a value or non-empty @@ -42,10 +41,11 @@ export class Tree { * @param {Tree=} parent_ Optional parent node. * @param {TreeNode=} node_ Optional node to wrap. */ - constructor(private name_: string = '', - private parent_: Tree | null = null, - private node_: TreeNode = new TreeNode()) { - } + constructor( + private name_: string = '', + private parent_: Tree | null = null, + private node_: TreeNode = new TreeNode() + ) {} /** * Returns a sub-Tree for the given path. @@ -55,9 +55,9 @@ export class Tree { */ subTree(pathObj: string | Path): Tree { // TODO: Require pathObj to be Path? - let path = (pathObj instanceof Path) ? - pathObj : new Path(pathObj); - let child = this as any, next; + let path = pathObj instanceof Path ? pathObj : new Path(pathObj); + let child = this as any, + next; while ((next = path.getFront()) !== null) { const childNode = safeGet(child.node_.children, next) || new TreeNode(); child = new Tree(next, child, childNode); @@ -131,16 +131,18 @@ export class Tree { * @param {boolean=} childrenFirst Whether to call action on children before calling it on * parent. */ - forEachDescendant(action: (tree: Tree) => void, includeSelf?: boolean, childrenFirst?: boolean) { - if (includeSelf && !childrenFirst) - action(this); + forEachDescendant( + action: (tree: Tree) => void, + includeSelf?: boolean, + childrenFirst?: boolean + ) { + if (includeSelf && !childrenFirst) action(this); - this.forEachChild(function (child) { - child.forEachDescendant(action, /*includeSelf=*/true, childrenFirst); + this.forEachChild(function(child) { + child.forEachDescendant(action, /*includeSelf=*/ true, childrenFirst); }); - if (includeSelf && childrenFirst) - action(this); + if (includeSelf && childrenFirst) action(this); } /** @@ -151,7 +153,10 @@ export class Tree { * @param {boolean=} includeSelf Whether to call action on this node as well. * @return {boolean} true if the action callback returned true. 
*/ - forEachAncestor(action: (tree: Tree) => void, includeSelf?: boolean): boolean { + forEachAncestor( + action: (tree: Tree) => void, + includeSelf?: boolean + ): boolean { let node = includeSelf ? this : this.parent(); while (node !== null) { if (action(node)) { @@ -170,11 +175,9 @@ export class Tree { * @param {function(!Tree.)} action Action to be called for each child. */ forEachImmediateDescendantWithValue(action: (tree: Tree) => void) { - this.forEachChild(function (child) { - if (child.getValue() !== null) - action(child); - else - child.forEachImmediateDescendantWithValue(action); + this.forEachChild(function(child) { + if (child.getValue() !== null) action(child); + else child.forEachImmediateDescendantWithValue(action); }); } @@ -182,8 +185,11 @@ export class Tree { * @return {!Path} The path of this tree node, as a Path. */ path(): Path { - return new Path(this.parent_ === null ? - this.name_ : this.parent_.path() + '/' + this.name_); + return new Path( + this.parent_ === null + ? 
this.name_ + : this.parent_.path() + '/' + this.name_ + ); } /** @@ -206,8 +212,7 @@ export class Tree { * @private */ private updateParents_() { - if (this.parent_ !== null) - this.parent_.updateChild_(this.name_, this); + if (this.parent_ !== null) this.parent_.updateChild_(this.name_, this); } /** @@ -221,11 +226,10 @@ export class Tree { const childEmpty = child.isEmpty(); const childExists = contains(this.node_.children, childName); if (childEmpty && childExists) { - delete (this.node_.children[childName]); + delete this.node_.children[childName]; this.node_.childCount--; this.updateParents_(); - } - else if (!childEmpty && !childExists) { + } else if (!childEmpty && !childExists) { this.node_.children[childName] = child.node_; this.node_.childCount++; this.updateParents_(); diff --git a/src/database/core/util/VisibilityMonitor.ts b/src/database/core/util/VisibilityMonitor.ts index fc518d7f773..7c17beb2db5 100644 --- a/src/database/core/util/VisibilityMonitor.ts +++ b/src/database/core/util/VisibilityMonitor.ts @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -import { EventEmitter } from "./EventEmitter"; -import { assert } from "../../../utils/assert"; +import { EventEmitter } from './EventEmitter'; +import { assert } from '../../../utils/assert'; declare const document: any; @@ -33,7 +33,10 @@ export class VisibilityMonitor extends EventEmitter { super(['visible']); let hidden: string; let visibilityChange: string; - if (typeof document !== 'undefined' && typeof document.addEventListener !== 'undefined') { + if ( + typeof document !== 'undefined' && + typeof document.addEventListener !== 'undefined' + ) { if (typeof document['hidden'] !== 'undefined') { // Opera 12.10 and Firefox 18 and later support visibilityChange = 'visibilitychange'; @@ -57,13 +60,17 @@ export class VisibilityMonitor extends EventEmitter { this.visible_ = true; if (visibilityChange) { - document.addEventListener(visibilityChange, () => { - const visible = !document[hidden]; - if (visible !== this.visible_) { - this.visible_ = visible; - this.trigger('visible', visible); - } - }, false); + document.addEventListener( + visibilityChange, + () => { + const visible = !document[hidden]; + if (visible !== this.visible_) { + this.visible_ = visible; + this.trigger('visible', visible); + } + }, + false + ); } } @@ -75,4 +82,4 @@ export class VisibilityMonitor extends EventEmitter { assert(eventType === 'visible', 'Unknown event type: ' + eventType); return [this.visible_]; } -} \ No newline at end of file +} diff --git a/src/database/core/util/libs/parser.ts b/src/database/core/util/libs/parser.ts index 9315f99639f..afe58fff2fd 100644 --- a/src/database/core/util/libs/parser.ts +++ b/src/database/core/util/libs/parser.ts @@ -42,29 +42,40 @@ function decodePath(pathString: string): string { * @param {!string} dataURL * @return {{repoInfo: !RepoInfo, path: !Path}} */ -export const parseRepoInfo = function (dataURL: string): { repoInfo: RepoInfo, path: Path } { +export const parseRepoInfo = function( + dataURL: string +): { repoInfo: RepoInfo; path: Path 
} { const parsedUrl = parseURL(dataURL), namespace = parsedUrl.subdomain; if (parsedUrl.domain === 'firebase') { - fatal(parsedUrl.host + - ' is no longer supported. ' + - 'Please use .firebaseio.com instead'); + fatal( + parsedUrl.host + + ' is no longer supported. ' + + 'Please use .firebaseio.com instead' + ); } // Catch common error of uninitialized namespace value. if (!namespace || namespace == 'undefined') { - fatal('Cannot parse Firebase url. Please use https://.firebaseio.com'); + fatal( + 'Cannot parse Firebase url. Please use https://.firebaseio.com' + ); } if (!parsedUrl.secure) { warnIfPageIsSecure(); } - const webSocketOnly = (parsedUrl.scheme === 'ws') || (parsedUrl.scheme === 'wss'); + const webSocketOnly = parsedUrl.scheme === 'ws' || parsedUrl.scheme === 'wss'; return { - repoInfo: new RepoInfo(parsedUrl.host, parsedUrl.secure, namespace, webSocketOnly), + repoInfo: new RepoInfo( + parsedUrl.host, + parsedUrl.secure, + namespace, + webSocketOnly + ), path: new Path(parsedUrl.pathString) }; }; @@ -74,20 +85,27 @@ export const parseRepoInfo = function (dataURL: string): { repoInfo: RepoInfo, p * @param {!string} dataURL * @return {{host: string, port: number, domain: string, subdomain: string, secure: boolean, scheme: string, pathString: string}} */ -export const parseURL = function (dataURL: string): { - host: string, - port: number, - domain: string, - subdomain: string, - secure: boolean, - scheme: string, - pathString: string +export const parseURL = function( + dataURL: string +): { + host: string; + port: number; + domain: string; + subdomain: string; + secure: boolean; + scheme: string; + pathString: string; } { // Default to empty strings in the event of a malformed string. - let host = '', domain = '', subdomain = '', pathString = ''; + let host = '', + domain = '', + subdomain = '', + pathString = ''; // Always default to SSL, unless otherwise specified. 
- let secure = true, scheme = 'https', port = 443; + let secure = true, + scheme = 'https', + port = 443; // Don't do any validation here. The caller is responsible for validating the result of parsing. if (typeof dataURL === 'string') { @@ -118,7 +136,7 @@ export const parseURL = function (dataURL: string): { // If we have a port, use scheme for determining if it's secure. colonInd = host.indexOf(':'); if (colonInd >= 0) { - secure = (scheme === 'https') || (scheme === 'wss'); + secure = scheme === 'https' || scheme === 'wss'; port = parseInt(host.substring(colonInd + 1), 10); } } @@ -130,6 +148,6 @@ export const parseURL = function (dataURL: string): { subdomain, secure, scheme, - pathString, + pathString }; -}; \ No newline at end of file +}; diff --git a/src/database/core/util/util.ts b/src/database/core/util/util.ts index 4e19229cf20..e1e14c5cfef 100644 --- a/src/database/core/util/util.ts +++ b/src/database/core/util/util.ts @@ -32,25 +32,23 @@ import { isNodeSdk } from '../../../utils/environment'; * Returns a locally-unique ID (generated by just incrementing up from 0 each time its called). * @type {function(): number} Generated ID. 
*/ -export const LUIDGenerator: (() => number) = (function () { +export const LUIDGenerator: (() => number) = (function() { let id = 1; - return function () { + return function() { return id++; }; })(); - /** * URL-safe base64 encoding * @param {!string} str * @return {!string} */ -export const base64Encode = function (str: string): string { +export const base64Encode = function(str: string): string { const utf8Bytes = stringToByteArray(str); - return base64.encodeByteArray(utf8Bytes, /*useWebSafe=*/true); + return base64.encodeByteArray(utf8Bytes, /*useWebSafe=*/ true); }; - let BufferImpl; export function setBufferImpl(impl) { BufferImpl = impl; @@ -64,12 +62,12 @@ export function setBufferImpl(impl) { * @param {string} str To be decoded * @return {?string} Decoded result, if possible */ -export const base64Decode = function (str: string): string | null { +export const base64Decode = function(str: string): string | null { try { if (BufferImpl) { return new BufferImpl(str, 'base64').toString('utf8'); } else { - return base64.decodeString(str, /*useWebSafe=*/true); + return base64.decodeString(str, /*useWebSafe=*/ true); } } catch (e) { log('base64Decode failed: ', e); @@ -77,13 +75,12 @@ export const base64Decode = function (str: string): string | null { return null; }; - /** * Sha1 hash of the input string * @param {!string} str The string to hash * @return {!string} The resulting hash */ -export const sha1 = function (str: string): string { +export const sha1 = function(str: string): string { const utf8Bytes = stringToByteArray(str); const sha1 = new Sha1(); sha1.update(utf8Bytes); @@ -91,23 +88,24 @@ export const sha1 = function (str: string): string { return base64.encodeByteArray(sha1Bytes); }; - /** * @param {...*} var_args * @return {string} * @private */ -const buildLogMessage_ = function (...var_args: any[]): string { +const buildLogMessage_ = function(...var_args: any[]): string { let message = ''; for (let i = 0; i < var_args.length; i++) { - if 
(Array.isArray(var_args[i]) || - (var_args[i] && typeof var_args[i] === 'object' && typeof var_args[i].length === 'number')) { + if ( + Array.isArray(var_args[i]) || + (var_args[i] && + typeof var_args[i] === 'object' && + typeof var_args[i].length === 'number') + ) { message += buildLogMessage_.apply(null, var_args[i]); - } - else if (typeof var_args[i] === 'object') { + } else if (typeof var_args[i] === 'object') { message += stringify(var_args[i]); - } - else { + } else { message += var_args[i]; } message += ' '; @@ -116,14 +114,12 @@ const buildLogMessage_ = function (...var_args: any[]): string { return message; }; - /** * Use this for all debug messages in Firebase. * @type {?function(string)} */ export let logger: ((a: string) => void) | null = null; - /** * Flag to check for log availability on first log message * @type {boolean} @@ -131,27 +127,32 @@ export let logger: ((a: string) => void) | null = null; */ let firstLog_ = true; - /** * The implementation of Firebase.enableLogging (defined here to break dependencies) * @param {boolean|?function(string)} logger_ A flag to turn on logging, or a custom logger * @param {boolean=} persistent Whether or not to persist logging settings across refreshes */ -export const enableLogging = function (logger_?: boolean | ((a: string) => void) | null, persistent?: boolean) { - assert(!persistent || (logger_ === true || logger_ === false), 'Can\'t turn on custom loggers persistently.'); +export const enableLogging = function( + logger_?: boolean | ((a: string) => void) | null, + persistent?: boolean +) { + assert( + !persistent || (logger_ === true || logger_ === false), + "Can't turn on custom loggers persistently." + ); if (logger_ === true) { if (typeof console !== 'undefined') { if (typeof console.log === 'function') { logger = console.log.bind(console); } else if (typeof console.log === 'object') { // IE does this. 
- logger = function (message) { console.log(message); }; + logger = function(message) { + console.log(message); + }; } } - if (persistent) - SessionStorage.set('logging_enabled', true); - } - else if (typeof logger_ === 'function') { + if (persistent) SessionStorage.set('logging_enabled', true); + } else if (typeof logger_ === 'function') { logger = logger_; } else { logger = null; @@ -159,12 +160,11 @@ export const enableLogging = function (logger_?: boolean | ((a: string) => void) } }; - /** * * @param {...(string|Arguments)} var_args */ -export const log = function (...var_args: string[]) { +export const log = function(...var_args: string[]) { if (firstLog_ === true) { firstLog_ = false; if (logger === null && SessionStorage.get('logging_enabled') === true) @@ -177,25 +177,24 @@ export const log = function (...var_args: string[]) { } }; - /** * @param {!string} prefix * @return {function(...[*])} */ -export const logWrapper = function (prefix: string): (...var_args: any[]) => void { - return function (...var_args: any[]) { +export const logWrapper = function( + prefix: string +): (...var_args: any[]) => void { + return function(...var_args: any[]) { log(prefix, ...var_args); }; }; - /** * @param {...string} var_args */ -export const error = function (...var_args: string[]) { +export const error = function(...var_args: string[]) { if (typeof console !== 'undefined') { - const message = 'FIREBASE INTERNAL ERROR: ' + - buildLogMessage_(...var_args); + const message = 'FIREBASE INTERNAL ERROR: ' + buildLogMessage_(...var_args); if (typeof console.error !== 'undefined') { console.error(message); } else { @@ -204,20 +203,18 @@ export const error = function (...var_args: string[]) { } }; - /** * @param {...string} var_args */ -export const fatal = function (...var_args: string[]) { +export const fatal = function(...var_args: string[]) { const message = buildLogMessage_(...var_args); throw new Error('FIREBASE FATAL ERROR: ' + message); }; - /** * @param {...*} var_args 
*/ -export const warn = function (...var_args: any[]) { +export const warn = function(...var_args: any[]) { if (typeof console !== 'undefined') { const message = 'FIREBASE WARNING: ' + buildLogMessage_(...var_args); if (typeof console.warn !== 'undefined') { @@ -228,48 +225,54 @@ export const warn = function (...var_args: any[]) { } }; - /** * Logs a warning if the containing page uses https. Called when a call to new Firebase * does not use https. */ -export const warnIfPageIsSecure = function () { +export const warnIfPageIsSecure = function() { // Be very careful accessing browser globals. Who knows what may or may not exist. - if (typeof window !== 'undefined' && window.location && window.location.protocol && - window.location.protocol.indexOf('https:') !== -1) { - warn('Insecure Firebase access from a secure page. ' + - 'Please use https in calls to new Firebase().'); + if ( + typeof window !== 'undefined' && + window.location && + window.location.protocol && + window.location.protocol.indexOf('https:') !== -1 + ) { + warn( + 'Insecure Firebase access from a secure page. ' + + 'Please use https in calls to new Firebase().' + ); } }; - /** * @param {!String} methodName */ -export const warnAboutUnsupportedMethod = function (methodName: string) { - warn(methodName + - ' is unsupported and will likely change soon. ' + - 'Please do not use.'); +export const warnAboutUnsupportedMethod = function(methodName: string) { + warn( + methodName + + ' is unsupported and will likely change soon. ' + + 'Please do not use.' + ); }; - /** * Returns true if data is NaN, or +/- Infinity. 
* @param {*} data * @return {boolean} */ -export const isInvalidJSONNumber = function (data: any): boolean { - return typeof data === 'number' && +export const isInvalidJSONNumber = function(data: any): boolean { + return ( + typeof data === 'number' && (data != data || // NaN - data == Number.POSITIVE_INFINITY || - data == Number.NEGATIVE_INFINITY); + data == Number.POSITIVE_INFINITY || + data == Number.NEGATIVE_INFINITY) + ); }; - /** * @param {function()} fn */ -export const executeWhenDOMReady = function (fn: () => void) { +export const executeWhenDOMReady = function(fn: () => void) { if (isNodeSdk() || document.readyState === 'complete') { fn(); } else { @@ -277,7 +280,7 @@ export const executeWhenDOMReady = function (fn: () => void) { // fire before onload), but fall back to onload. let called = false; - let wrappedFn = function () { + let wrappedFn = function() { if (!document.body) { setTimeout(wrappedFn, Math.floor(10)); return; @@ -295,12 +298,9 @@ export const executeWhenDOMReady = function (fn: () => void) { window.addEventListener('load', wrappedFn, false); } else if ((document as any).attachEvent) { // IE. - (document as any).attachEvent('onreadystatechange', - function () { - if (document.readyState === 'complete') - wrappedFn(); - } - ); + (document as any).attachEvent('onreadystatechange', function() { + if (document.readyState === 'complete') wrappedFn(); + }); // fallback to onload. (window as any).attachEvent('onload', wrappedFn); @@ -311,28 +311,25 @@ export const executeWhenDOMReady = function (fn: () => void) { } }; - /** * Minimum key name. Invalid for actual data, used as a marker to sort before any valid names * @type {!string} */ export const MIN_NAME = '[MIN_NAME]'; - /** * Maximum key name. 
Invalid for actual data, used as a marker to sort above any valid names * @type {!string} */ export const MAX_NAME = '[MAX_NAME]'; - /** * Compares valid Firebase key names, plus min and max name * @param {!string} a * @param {!string} b * @return {!number} */ -export const nameCompare = function (a: string, b: string): number { +export const nameCompare = function(a: string, b: string): number { if (a === b) { return 0; } else if (a === MIN_NAME || b === MAX_NAME) { @@ -345,25 +342,24 @@ export const nameCompare = function (a: string, b: string): number { if (aAsInt !== null) { if (bAsInt !== null) { - return (aAsInt - bAsInt) == 0 ? (a.length - b.length) : (aAsInt - bAsInt); + return aAsInt - bAsInt == 0 ? a.length - b.length : aAsInt - bAsInt; } else { return -1; } } else if (bAsInt !== null) { return 1; } else { - return (a < b) ? -1 : 1; + return a < b ? -1 : 1; } } }; - /** * @param {!string} a * @param {!string} b * @return {!number} comparison result. */ -export const stringCompare = function (a: string, b: string): number { +export const stringCompare = function(a: string, b: string): number { if (a === b) { return 0; } else if (a < b) { @@ -373,28 +369,30 @@ export const stringCompare = function (a: string, b: string): number { } }; - /** * @param {string} key * @param {Object} obj * @return {*} */ -export const requireKey = function (key: string, obj: { [k: string]: any }): any { - if (obj && (key in obj)) { +export const requireKey = function( + key: string, + obj: { [k: string]: any } +): any { + if (obj && key in obj) { return obj[key]; } else { - throw new Error('Missing required key (' + key + ') in object: ' + stringify(obj)); + throw new Error( + 'Missing required key (' + key + ') in object: ' + stringify(obj) + ); } }; - /** * @param {*} obj * @return {string} */ -export const ObjectToUniqueKey = function (obj: any): string { - if (typeof obj !== 'object' || obj === null) - return stringify(obj); +export const ObjectToUniqueKey = function(obj: 
any): string { + if (typeof obj !== 'object' || obj === null) return stringify(obj); const keys = []; for (let k in obj) { @@ -405,8 +403,7 @@ export const ObjectToUniqueKey = function (obj: any): string { keys.sort(); let key = '{'; for (let i = 0; i < keys.length; i++) { - if (i !== 0) - key += ','; + if (i !== 0) key += ','; key += stringify(keys[i]); key += ':'; key += ObjectToUniqueKey(obj[keys[i]]); @@ -416,14 +413,16 @@ export const ObjectToUniqueKey = function (obj: any): string { return key; }; - /** * Splits a string into a number of smaller segments of maximum size * @param {!string} str The string * @param {!number} segsize The maximum number of chars in the string. * @return {Array.} The string, split into appropriately-sized chunks */ -export const splitStringBySize = function (str: string, segsize: number): string[] { +export const splitStringBySize = function( + str: string, + segsize: number +): string[] { const len = str.length; if (len <= segsize) { @@ -434,22 +433,23 @@ export const splitStringBySize = function (str: string, segsize: number): string for (let c = 0; c < len; c += segsize) { if (c + segsize > len) { dataSegs.push(str.substring(c, len)); - } - else { + } else { dataSegs.push(str.substring(c, c + segsize)); } } return dataSegs; }; - /** * Apply a function to each (key, value) pair in an object or * apply a function to each (index, value) pair in an array * @param {!(Object|Array)} obj The object or array to iterate over * @param {function(?, ?)} fn The function to apply */ -export const each = function (obj: Object | Array, fn: (v?: any, k?: any) => void) { +export const each = function( + obj: Object | Array, + fn: (v?: any, k?: any) => void +) { if (Array.isArray(obj)) { for (let i = 0; i < obj.length; ++i) { fn(i, obj[i]); @@ -465,18 +465,19 @@ export const each = function (obj: Object | Array, fn: (v?: any, k?: any) = } }; - /** * Like goog.bind, but doesn't bother to create a closure if opt_context is null/undefined. 
* @param {function(*)} callback Callback function. * @param {?Object=} context Optional context to bind to. * @return {function(*)} */ -export const bindCallback = function (callback: (a: any) => void, context?: object | null): Function { +export const bindCallback = function( + callback: (a: any) => void, + context?: object | null +): Function { return context ? callback.bind(context) : callback; }; - /** * Borrowed from http://hg.secondlife.com/llsd/src/tip/js/typedarray.js (MIT License) * I made one modification at the end and removed the NaN / Infinity @@ -484,22 +485,27 @@ export const bindCallback = function (callback: (a: any) => void, context?: obje * @param {!number} v A double * @return {string} */ -export const doubleToIEEE754String = function (v: number): string { +export const doubleToIEEE754String = function(v: number): string { assert(!isInvalidJSONNumber(v), 'Invalid JSON number'); // MJL - const ebits = 11, fbits = 52; + const ebits = 11, + fbits = 52; let bias = (1 << (ebits - 1)) - 1, - s, e, f, ln, - i, bits, str; + s, + e, + f, + ln, + i, + bits, + str; // Compute sign, exponent, fraction // Skip NaN / Infinity handling --MJL. if (v === 0) { e = 0; f = 0; - s = (1 / v === -Infinity) ? 1 : 0; - } - else { + s = 1 / v === -Infinity ? 
1 : 0; + } else { s = v < 0; v = Math.abs(v); @@ -508,8 +514,7 @@ export const doubleToIEEE754String = function (v: number): string { ln = Math.min(Math.floor(Math.log(v) / Math.LN2), bias); e = ln + bias; f = Math.round(v * Math.pow(2, fbits - ln) - Math.pow(2, fbits)); - } - else { + } else { // Denormalized e = 0; f = Math.round(v / Math.pow(2, 1 - bias - fbits)); @@ -534,61 +539,60 @@ export const doubleToIEEE754String = function (v: number): string { let hexByteString = ''; for (i = 0; i < 64; i += 8) { let hexByte = parseInt(str.substr(i, 8), 2).toString(16); - if (hexByte.length === 1) - hexByte = '0' + hexByte; + if (hexByte.length === 1) hexByte = '0' + hexByte; hexByteString = hexByteString + hexByte; } return hexByteString.toLowerCase(); }; - /** * Used to detect if we're in a Chrome content script (which executes in an * isolated environment where long-polling doesn't work). * @return {boolean} */ -export const isChromeExtensionContentScript = function (): boolean { - return !!(typeof window === 'object' && +export const isChromeExtensionContentScript = function(): boolean { + return !!( + typeof window === 'object' && window['chrome'] && window['chrome']['extension'] && !/^chrome/.test(window.location.href) ); }; - /** * Used to detect if we're in a Windows 8 Store app. 
* @return {boolean} */ -export const isWindowsStoreApp = function (): boolean { +export const isWindowsStoreApp = function(): boolean { // Check for the presence of a couple WinRT globals return typeof Windows === 'object' && typeof Windows.UI === 'object'; }; - /** * Converts a server error code to a Javascript Error * @param {!string} code * @param {!Query} query * @return {Error} */ -export const errorForServerCode = function (code: string, query: Query): Error { +export const errorForServerCode = function(code: string, query: Query): Error { let reason = 'Unknown Error'; if (code === 'too_big') { - reason = 'The data requested exceeds the maximum size ' + + reason = + 'The data requested exceeds the maximum size ' + 'that can be accessed with a single request.'; } else if (code == 'permission_denied') { - reason = 'Client doesn\'t have permission to access the desired data.'; + reason = "Client doesn't have permission to access the desired data."; } else if (code == 'unavailable') { reason = 'The service is unavailable'; } - const error = new Error(code + ' at ' + query.path.toString() + ': ' + reason); + const error = new Error( + code + ' at ' + query.path.toString() + ': ' + reason + ); (error as any).code = code.toUpperCase(); return error; }; - /** * Used to test for integer-looking strings * @type {RegExp} @@ -596,13 +600,12 @@ export const errorForServerCode = function (code: string, query: Query): Error { */ export const INTEGER_REGEXP_ = new RegExp('^-?\\d{1,10}$'); - /** * If the string contains a 32-bit integer, return it. Else return null. 
* @param {!string} str * @return {?number} */ -export const tryParseInt = function (str: string): number | null { +export const tryParseInt = function(str: string): number | null { if (INTEGER_REGEXP_.test(str)) { const intVal = Number(str); if (intVal >= -2147483648 && intVal <= 2147483647) { @@ -612,7 +615,6 @@ export const tryParseInt = function (str: string): number | null { return null; }; - /** * Helper to run some code but catch any exceptions and re-throw them later. * Useful for preventing user callbacks from breaking internal code. @@ -630,12 +632,12 @@ export const tryParseInt = function (str: string): number | null { * * @param {!function()} fn The code to guard. */ -export const exceptionGuard = function (fn: () => void) { +export const exceptionGuard = function(fn: () => void) { try { fn(); } catch (e) { // Re-throw exception when it's safe. - setTimeout(function () { + setTimeout(function() { // It used to be that "throw e" would result in a good console error with // relevant context, but as of Chrome 39, you just get the firebase.js // file/line number where we re-throw it, which is useless. So we log @@ -647,7 +649,6 @@ export const exceptionGuard = function (fn: () => void) { } }; - /** * Helper function to safely call opt_callback with the specified arguments. It: * 1. Turns into a no-op if opt_callback is null or undefined. @@ -656,27 +657,36 @@ export const exceptionGuard = function (fn: () => void) { * @param {?Function=} callback Optional onComplete callback. * @param {...*} var_args Arbitrary args to be passed to opt_onComplete */ -export const callUserCallback = function (callback?: Function | null, ...var_args: any[]) { +export const callUserCallback = function( + callback?: Function | null, + ...var_args: any[] +) { if (typeof callback === 'function') { - exceptionGuard(function () { + exceptionGuard(function() { callback(...var_args); }); } }; - /** * @return {boolean} true if we think we're currently being crawled. 
*/ -export const beingCrawled = function (): boolean { - const userAgent = (typeof window === 'object' && window['navigator'] && window['navigator']['userAgent']) || ''; +export const beingCrawled = function(): boolean { + const userAgent = + (typeof window === 'object' && + window['navigator'] && + window['navigator']['userAgent']) || + ''; // For now we whitelist the most popular crawlers. We should refine this to be the set of crawlers we // believe to support JavaScript/AJAX rendering. // NOTE: Google Webmaster Tools doesn't really belong, but their "This is how a visitor to your website // would have seen the page" is flaky if we don't treat it as a crawler. - return userAgent.search(/googlebot|google webmaster tools|bingbot|yahoo! slurp|baiduspider|yandexbot|duckduckbot/i) >= - 0; + return ( + userAgent.search( + /googlebot|google webmaster tools|bingbot|yahoo! slurp|baiduspider|yandexbot|duckduckbot/i + ) >= 0 + ); }; /** @@ -686,8 +696,12 @@ export const beingCrawled = function (): boolean { * @param {string} name * @param {!function(): *} fnGet */ -export const exportPropGetter = function (object: Object, name: string, fnGet: () => any) { - Object.defineProperty(object, name, {get: fnGet}); +export const exportPropGetter = function( + object: Object, + name: string, + fnGet: () => any +) { + Object.defineProperty(object, name, { get: fnGet }); }; /** @@ -699,7 +713,10 @@ export const exportPropGetter = function (object: Object, name: string, fnGet: ( * @param {number} time Milliseconds to wait before running. * @return {number|Object} The setTimeout() return value. 
*/ -export const setTimeoutNonBlocking = function (fn: Function, time: number): number | Object { +export const setTimeoutNonBlocking = function( + fn: Function, + time: number +): number | Object { const timeout: number | Object = setTimeout(fn, time); if (typeof timeout === 'object' && (timeout as any)['unref']) { (timeout as any)['unref'](); diff --git a/src/database/core/util/validation.ts b/src/database/core/util/validation.ts index 9134158e650..f3362da8575 100644 --- a/src/database/core/util/validation.ts +++ b/src/database/core/util/validation.ts @@ -43,30 +43,33 @@ export const INVALID_PATH_REGEX_ = /[\[\].#$\u0000-\u001F\u007F]/; */ export const MAX_LEAF_SIZE_ = 10 * 1024 * 1024; - /** * @param {*} key * @return {boolean} */ -export const isValidKey = function (key: any): boolean { - return typeof key === 'string' && key.length !== 0 && - !INVALID_KEY_REGEX_.test(key); +export const isValidKey = function(key: any): boolean { + return ( + typeof key === 'string' && key.length !== 0 && !INVALID_KEY_REGEX_.test(key) + ); }; /** * @param {string} pathString * @return {boolean} */ -export const isValidPathString = function (pathString: string): boolean { - return typeof pathString === 'string' && pathString.length !== 0 && - !INVALID_PATH_REGEX_.test(pathString); +export const isValidPathString = function(pathString: string): boolean { + return ( + typeof pathString === 'string' && + pathString.length !== 0 && + !INVALID_PATH_REGEX_.test(pathString) + ); }; /** * @param {string} pathString * @return {boolean} */ -export const isValidRootPathString = function (pathString: string): boolean { +export const isValidRootPathString = function(pathString: string): boolean { if (pathString) { // Allow '/.info/' at the beginning. 
pathString = pathString.replace(/^\/*\.info(\/|$)/, '/'); @@ -79,11 +82,13 @@ export const isValidRootPathString = function (pathString: string): boolean { * @param {*} priority * @return {boolean} */ -export const isValidPriority = function (priority: any): boolean { - return priority === null || +export const isValidPriority = function(priority: any): boolean { + return ( + priority === null || typeof priority === 'string' || (typeof priority === 'number' && !isInvalidJSONNumber(priority)) || - ((priority && typeof priority === 'object') && contains(priority, '.sv')); + (priority && typeof priority === 'object' && contains(priority, '.sv')) + ); }; /** @@ -95,14 +100,19 @@ export const isValidPriority = function (priority: any): boolean { * @param {!Path} path * @param {boolean} optional */ -export const validateFirebaseDataArg = function (fnName: string, argumentNumber: number, - data: any, path: Path, optional: boolean) { - if (optional && data === undefined) - return; +export const validateFirebaseDataArg = function( + fnName: string, + argumentNumber: number, + data: any, + path: Path, + optional: boolean +) { + if (optional && data === undefined) return; validateFirebaseData( errorPrefixFxn(fnName, argumentNumber, optional), - data, path + data, + path ); }; @@ -113,47 +123,70 @@ export const validateFirebaseDataArg = function (fnName: string, argumentNumber: * @param {*} data * @param {!Path|!ValidationPath} path_ */ -export const validateFirebaseData = function (errorPrefix: string, data: any, path_: Path | ValidationPath) { - const path = path_ instanceof Path - ? new ValidationPath(path_, errorPrefix) - : path_; +export const validateFirebaseData = function( + errorPrefix: string, + data: any, + path_: Path | ValidationPath +) { + const path = + path_ instanceof Path ? 
new ValidationPath(path_, errorPrefix) : path_; if (data === undefined) { throw new Error(errorPrefix + 'contains undefined ' + path.toErrorString()); } if (typeof data === 'function') { - throw new Error(errorPrefix + 'contains a function ' + path.toErrorString() + - ' with contents = ' + data.toString()); + throw new Error( + errorPrefix + + 'contains a function ' + + path.toErrorString() + + ' with contents = ' + + data.toString() + ); } if (isInvalidJSONNumber(data)) { - throw new Error(errorPrefix + 'contains ' + data.toString() + ' ' + path.toErrorString()); + throw new Error( + errorPrefix + 'contains ' + data.toString() + ' ' + path.toErrorString() + ); } // Check max leaf size, but try to avoid the utf8 conversion if we can. - if (typeof data === 'string' && + if ( + typeof data === 'string' && data.length > MAX_LEAF_SIZE_ / 3 && - stringLength(data) > MAX_LEAF_SIZE_) { - throw new Error(errorPrefix + 'contains a string greater than ' + - MAX_LEAF_SIZE_ + - ' utf8 bytes ' + path.toErrorString() + - ' (\'' + data.substring(0, 50) + '...\')'); + stringLength(data) > MAX_LEAF_SIZE_ + ) { + throw new Error( + errorPrefix + + 'contains a string greater than ' + + MAX_LEAF_SIZE_ + + ' utf8 bytes ' + + path.toErrorString() + + " ('" + + data.substring(0, 50) + + "...')" + ); } // TODO = Perf = Consider combining the recursive validation of keys into NodeFromJSON // to save extra walking of large objects. 
- if ((data && typeof data === 'object')) { - let hasDotValue = false, hasActualChild = false; - forEach(data, function (key: string, value: any) { + if (data && typeof data === 'object') { + let hasDotValue = false, + hasActualChild = false; + forEach(data, function(key: string, value: any) { if (key === '.value') { hasDotValue = true; - } - else if (key !== '.priority' && key !== '.sv') { + } else if (key !== '.priority' && key !== '.sv') { hasActualChild = true; if (!isValidKey(key)) { - throw new Error(errorPrefix + ' contains an invalid key (' + key + ') ' + - path.toErrorString() + - '. Keys must be non-empty strings ' + - 'and can\'t contain ".", "#", "$", "/", "[", or "]"'); + throw new Error( + errorPrefix + + ' contains an invalid key (' + + key + + ') ' + + path.toErrorString() + + '. Keys must be non-empty strings ' + + 'and can\'t contain ".", "#", "$", "/", "[", or "]"' + ); } } @@ -163,9 +196,12 @@ export const validateFirebaseData = function (errorPrefix: string, data: any, pa }); if (hasDotValue && hasActualChild) { - throw new Error(errorPrefix + ' contains ".value" child ' + - path.toErrorString() + - ' in addition to actual children.'); + throw new Error( + errorPrefix + + ' contains ".value" child ' + + path.toErrorString() + + ' in addition to actual children.' 
+ ); } } }; @@ -176,19 +212,27 @@ export const validateFirebaseData = function (errorPrefix: string, data: any, pa * @param {string} errorPrefix * @param {Array} mergePaths */ -export const validateFirebaseMergePaths = function (errorPrefix: string, mergePaths: Path[]) { +export const validateFirebaseMergePaths = function( + errorPrefix: string, + mergePaths: Path[] +) { let i, curPath; for (i = 0; i < mergePaths.length; i++) { curPath = mergePaths[i]; const keys = curPath.slice(); for (let j = 0; j < keys.length; j++) { - if (keys[j] === '.priority' && j === (keys.length - 1)) { + if (keys[j] === '.priority' && j === keys.length - 1) { // .priority is OK } else if (!isValidKey(keys[j])) { - throw new Error(errorPrefix + 'contains an invalid key (' + keys[j] + ') in path ' + - curPath.toString() + - '. Keys must be non-empty strings ' + - 'and can\'t contain ".", "#", "$", "/", "[", or "]"'); + throw new Error( + errorPrefix + + 'contains an invalid key (' + + keys[j] + + ') in path ' + + curPath.toString() + + '. 
Keys must be non-empty strings ' + + 'and can\'t contain ".", "#", "$", "/", "[", or "]"' + ); } } } @@ -201,8 +245,13 @@ export const validateFirebaseMergePaths = function (errorPrefix: string, mergePa for (i = 0; i < mergePaths.length; i++) { curPath = mergePaths[i]; if (prevPath !== null && prevPath.contains(curPath)) { - throw new Error(errorPrefix + 'contains a path ' + prevPath.toString() + - ' that is ancestor of another path ' + curPath.toString()); + throw new Error( + errorPrefix + + 'contains a path ' + + prevPath.toString() + + ' that is ancestor of another path ' + + curPath.toString() + ); } prevPath = curPath; } @@ -218,26 +267,36 @@ export const validateFirebaseMergePaths = function (errorPrefix: string, mergePa * @param {!Path} path * @param {boolean} optional */ -export const validateFirebaseMergeDataArg = function (fnName: string, argumentNumber: number, - data: any, path: Path, optional: boolean) { - if (optional && data === undefined) - return; +export const validateFirebaseMergeDataArg = function( + fnName: string, + argumentNumber: number, + data: any, + path: Path, + optional: boolean +) { + if (optional && data === undefined) return; const errorPrefix = errorPrefixFxn(fnName, argumentNumber, optional); if (!(data && typeof data === 'object') || Array.isArray(data)) { - throw new Error(errorPrefix + ' must be an object containing the children to replace.'); + throw new Error( + errorPrefix + ' must be an object containing the children to replace.' 
+ ); } const mergePaths: Path[] = []; - forEach(data, function (key: string, value: any) { + forEach(data, function(key: string, value: any) { const curPath = new Path(key); validateFirebaseData(errorPrefix, value, path.child(curPath)); if (curPath.getBack() === '.priority') { if (!isValidPriority(value)) { throw new Error( - errorPrefix + 'contains an invalid value for \'' + curPath.toString() + '\', which must be a valid ' + - 'Firebase priority (a string, finite number, server value, or null).'); + errorPrefix + + "contains an invalid value for '" + + curPath.toString() + + "', which must be a valid " + + 'Firebase priority (a string, finite number, server value, or null).' + ); } } mergePaths.push(curPath); @@ -245,27 +304,37 @@ export const validateFirebaseMergeDataArg = function (fnName: string, argumentNu validateFirebaseMergePaths(errorPrefix, mergePaths); }; -export const validatePriority = function (fnName: string, argumentNumber: number, priority: any, optional: boolean) { - if (optional && priority === undefined) - return; +export const validatePriority = function( + fnName: string, + argumentNumber: number, + priority: any, + optional: boolean +) { + if (optional && priority === undefined) return; if (isInvalidJSONNumber(priority)) throw new Error( errorPrefixFxn(fnName, argumentNumber, optional) + - 'is ' + priority.toString() + - ', but must be a valid Firebase priority (a string, finite number, ' + - 'server value, or null).'); + 'is ' + + priority.toString() + + ', but must be a valid Firebase priority (a string, finite number, ' + + 'server value, or null).' + ); // Special case to allow importing data with a .sv. if (!isValidPriority(priority)) throw new Error( errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid Firebase priority ' + - '(a string, finite number, server value, or null).'); + 'must be a valid Firebase priority ' + + '(a string, finite number, server value, or null).' 
+ ); }; -export const validateEventType = function (fnName: string, argumentNumber: number, - eventType: string, optional: boolean) { - if (optional && eventType === undefined) - return; +export const validateEventType = function( + fnName: string, + argumentNumber: number, + eventType: string, + optional: boolean +) { + if (optional && eventType === undefined) return; switch (eventType) { case 'value': @@ -277,37 +346,53 @@ export const validateEventType = function (fnName: string, argumentNumber: numbe default: throw new Error( errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid event type = "value", "child_added", "child_removed", ' + - '"child_changed", or "child_moved".'); + 'must be a valid event type = "value", "child_added", "child_removed", ' + + '"child_changed", or "child_moved".' + ); } }; -export const validateKey = function (fnName: string, argumentNumber: number, - key: string, optional: boolean) { - if (optional && key === undefined) - return; +export const validateKey = function( + fnName: string, + argumentNumber: number, + key: string, + optional: boolean +) { + if (optional && key === undefined) return; if (!isValidKey(key)) - throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'was an invalid key = "' + key + - '". Firebase keys must be non-empty strings and ' + - 'can\'t contain ".", "#", "$", "/", "[", or "]").'); + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'was an invalid key = "' + + key + + '". Firebase keys must be non-empty strings and ' + + 'can\'t contain ".", "#", "$", "/", "[", or "]").' 
+ ); }; -export const validatePathString = function (fnName: string, argumentNumber: number, - pathString: string, optional: boolean) { - if (optional && pathString === undefined) - return; +export const validatePathString = function( + fnName: string, + argumentNumber: number, + pathString: string, + optional: boolean +) { + if (optional && pathString === undefined) return; if (!isValidPathString(pathString)) - throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'was an invalid path = "' + - pathString + - '". Paths must be non-empty strings and ' + - 'can\'t contain ".", "#", "$", "[", or "]"'); + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'was an invalid path = "' + + pathString + + '". Paths must be non-empty strings and ' + + 'can\'t contain ".", "#", "$", "[", or "]"' + ); }; -export const validateRootPathString = function (fnName: string, argumentNumber: number, - pathString: string, optional: boolean) { +export const validateRootPathString = function( + fnName: string, + argumentNumber: number, + pathString: string, + optional: boolean +) { if (pathString) { // Allow '/.info/' at the beginning. 
pathString = pathString.replace(/^\/*\.info(\/|$)/, '/'); @@ -316,64 +401,100 @@ export const validateRootPathString = function (fnName: string, argumentNumber: validatePathString(fnName, argumentNumber, pathString, optional); }; -export const validateWritablePath = function (fnName: string, path: Path) { +export const validateWritablePath = function(fnName: string, path: Path) { if (path.getFront() === '.info') { - throw new Error(fnName + ' failed = Can\'t modify data under /.info/'); + throw new Error(fnName + " failed = Can't modify data under /.info/"); } }; -export const validateUrl = function (fnName: string, argumentNumber: number, - parsedUrl: { repoInfo: RepoInfo, path: Path }) { +export const validateUrl = function( + fnName: string, + argumentNumber: number, + parsedUrl: { repoInfo: RepoInfo; path: Path } +) { // TODO = Validate server better. const pathString = parsedUrl.path.toString(); - if (!(typeof parsedUrl.repoInfo.host === 'string') || parsedUrl.repoInfo.host.length === 0 || + if ( + !(typeof parsedUrl.repoInfo.host === 'string') || + parsedUrl.repoInfo.host.length === 0 || !isValidKey(parsedUrl.repoInfo.namespace) || - (pathString.length !== 0 && !isValidRootPathString(pathString))) { - throw new Error(errorPrefixFxn(fnName, argumentNumber, false) + - 'must be a valid firebase URL and ' + - 'the path can\'t contain ".", "#", "$", "[", or "]".'); + (pathString.length !== 0 && !isValidRootPathString(pathString)) + ) { + throw new Error( + errorPrefixFxn(fnName, argumentNumber, false) + + 'must be a valid firebase URL and ' + + 'the path can\'t contain ".", "#", "$", "[", or "]".' 
+ ); } }; -export const validateCredential = function (fnName: string, argumentNumber: number, cred: any, optional: boolean) { - if (optional && cred === undefined) - return; +export const validateCredential = function( + fnName: string, + argumentNumber: number, + cred: any, + optional: boolean +) { + if (optional && cred === undefined) return; if (!(typeof cred === 'string')) throw new Error( errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid credential (a string).'); + 'must be a valid credential (a string).' + ); }; -export const validateBoolean = function (fnName: string, argumentNumber: number, bool: any, optional: boolean) { - if (optional && bool === undefined) - return; +export const validateBoolean = function( + fnName: string, + argumentNumber: number, + bool: any, + optional: boolean +) { + if (optional && bool === undefined) return; if (typeof bool !== 'boolean') - throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a boolean.'); + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + 'must be a boolean.' + ); }; -export const validateString = function (fnName: string, argumentNumber: number, string: any, optional: boolean) { - if (optional && string === undefined) - return; +export const validateString = function( + fnName: string, + argumentNumber: number, + string: any, + optional: boolean +) { + if (optional && string === undefined) return; if (!(typeof string === 'string')) { throw new Error( errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid string.'); + 'must be a valid string.' 
+ ); } }; -export const validateObject = function (fnName: string, argumentNumber: number, obj: any, optional: boolean) { - if (optional && obj === undefined) - return; +export const validateObject = function( + fnName: string, + argumentNumber: number, + obj: any, + optional: boolean +) { + if (optional && obj === undefined) return; if (!(obj && typeof obj === 'object') || obj === null) { throw new Error( errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid object.'); + 'must be a valid object.' + ); } }; -export const validateObjectContainsKey = function (fnName: string, argumentNumber: number, obj: any, key: string, optional: boolean, opt_type?: string) { - const objectContainsKey = ((obj && typeof obj === 'object') && contains(obj, key)); +export const validateObjectContainsKey = function( + fnName: string, + argumentNumber: number, + obj: any, + key: string, + optional: boolean, + opt_type?: string +) { + const objectContainsKey = + obj && typeof obj === 'object' && contains(obj, key); if (!objectContainsKey) { if (optional) { @@ -381,23 +502,40 @@ export const validateObjectContainsKey = function (fnName: string, argumentNumbe } else { throw new Error( errorPrefixFxn(fnName, argumentNumber, optional) + - 'must contain the key "' + key + '"'); + 'must contain the key "' + + key + + '"' + ); } } if (opt_type) { const val = safeGet(obj, key); - if ((opt_type === 'number' && !(typeof val === 'number')) || + if ( + (opt_type === 'number' && !(typeof val === 'number')) || (opt_type === 'string' && !(typeof val === 'string')) || (opt_type === 'boolean' && !(typeof val === 'boolean')) || (opt_type === 'function' && !(typeof val === 'function')) || - (opt_type === 'object' && !(typeof val === 'object') && val)) { + (opt_type === 'object' && !(typeof val === 'object') && val) + ) { if (optional) { - throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'contains invalid value for key "' + key + '" (must be of type "' + opt_type + '")'); 
+ throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'contains invalid value for key "' + + key + + '" (must be of type "' + + opt_type + + '")' + ); } else { - throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'must contain the key "' + key + '" with type "' + opt_type + '"'); + throw new Error( + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must contain the key "' + + key + + '" with type "' + + opt_type + + '"' + ); } } } diff --git a/src/database/core/view/CacheNode.ts b/src/database/core/view/CacheNode.ts index b928031daa8..08775a63a3e 100644 --- a/src/database/core/view/CacheNode.ts +++ b/src/database/core/view/CacheNode.ts @@ -29,11 +29,11 @@ export class CacheNode { * @param {boolean} fullyInitialized_ * @param {boolean} filtered_ */ - constructor(private node_: Node, - private fullyInitialized_: boolean, - private filtered_: boolean) { - - } + constructor( + private node_: Node, + private fullyInitialized_: boolean, + private filtered_: boolean + ) {} /** * Returns whether this node was fully initialized with either server data or a complete overwrite by the client @@ -69,7 +69,9 @@ export class CacheNode { * @return {boolean} */ isCompleteForChild(key: string): boolean { - return (this.isFullyInitialized() && !this.filtered_) || this.node_.hasChild(key); + return ( + (this.isFullyInitialized() && !this.filtered_) || this.node_.hasChild(key) + ); } /** @@ -78,5 +80,4 @@ export class CacheNode { getNode(): Node { return this.node_; } - } diff --git a/src/database/core/view/Change.ts b/src/database/core/view/Change.ts index 3beb7e28300..77e320800d0 100644 --- a/src/database/core/view/Change.ts +++ b/src/database/core/view/Change.ts @@ -26,12 +26,13 @@ import { Node } from '../snap/Node'; * @param {string=} prevName The name for the previous child, if applicable */ export class Change { - constructor(public type: string, - public snapshotNode: Node, - public childName?: string, - public oldSnap?: Node, - public 
prevName?: string) { - }; + constructor( + public type: string, + public snapshotNode: Node, + public childName?: string, + public oldSnap?: Node, + public prevName?: string + ) {} /** * @param {!Node} snapshot @@ -39,7 +40,7 @@ export class Change { */ static valueChange(snapshot: Node): Change { return new Change(Change.VALUE, snapshot); - }; + } /** * @param {string} childKey @@ -48,7 +49,7 @@ export class Change { */ static childAddedChange(childKey: string, snapshot: Node): Change { return new Change(Change.CHILD_ADDED, snapshot, childKey); - }; + } /** * @param {string} childKey @@ -57,7 +58,7 @@ export class Change { */ static childRemovedChange(childKey: string, snapshot: Node): Change { return new Change(Change.CHILD_REMOVED, snapshot, childKey); - }; + } /** * @param {string} childKey @@ -65,9 +66,13 @@ export class Change { * @param {!Node} oldSnapshot * @return {!Change} */ - static childChangedChange(childKey: string, newSnapshot: Node, oldSnapshot: Node): Change { + static childChangedChange( + childKey: string, + newSnapshot: Node, + oldSnapshot: Node + ): Change { return new Change(Change.CHILD_CHANGED, newSnapshot, childKey, oldSnapshot); - }; + } /** * @param {string} childKey @@ -76,7 +81,7 @@ export class Change { */ static childMovedChange(childKey: string, snapshot: Node): Change { return new Change(Change.CHILD_MOVED, snapshot, childKey); - }; + } //event types /** Event type for a child added */ @@ -94,4 +99,3 @@ export class Change { /** Event type for a value change */ static VALUE = 'value'; } - diff --git a/src/database/core/view/ChildChangeAccumulator.ts b/src/database/core/view/ChildChangeAccumulator.ts index 43238489069..0e2c91f581a 100644 --- a/src/database/core/view/ChildChangeAccumulator.ts +++ b/src/database/core/view/ChildChangeAccumulator.ts @@ -29,33 +29,69 @@ export class ChildChangeAccumulator { */ trackChildChange(change: Change) { const type = change.type; - const childKey = /** @type {!string} */ (change.childName); - 
assert(type == Change.CHILD_ADDED || - type == Change.CHILD_CHANGED || - type == Change.CHILD_REMOVED, 'Only child changes supported for tracking'); - assert(childKey !== '.priority', 'Only non-priority child changes can be tracked.'); + const childKey /** @type {!string} */ = change.childName; + assert( + type == Change.CHILD_ADDED || + type == Change.CHILD_CHANGED || + type == Change.CHILD_REMOVED, + 'Only child changes supported for tracking' + ); + assert( + childKey !== '.priority', + 'Only non-priority child changes can be tracked.' + ); const oldChange = safeGet(this.changeMap_, childKey) as Change; if (oldChange) { const oldType = oldChange.type; if (type == Change.CHILD_ADDED && oldType == Change.CHILD_REMOVED) { - this.changeMap_[childKey] = Change.childChangedChange(childKey, change.snapshotNode, oldChange.snapshotNode); - } else if (type == Change.CHILD_REMOVED && oldType == Change.CHILD_ADDED) { + this.changeMap_[childKey] = Change.childChangedChange( + childKey, + change.snapshotNode, + oldChange.snapshotNode + ); + } else if ( + type == Change.CHILD_REMOVED && + oldType == Change.CHILD_ADDED + ) { delete this.changeMap_[childKey]; - } else if (type == Change.CHILD_REMOVED && oldType == Change.CHILD_CHANGED) { - this.changeMap_[childKey] = Change.childRemovedChange(childKey, oldChange.oldSnap); - } else if (type == Change.CHILD_CHANGED && oldType == Change.CHILD_ADDED) { - this.changeMap_[childKey] = Change.childAddedChange(childKey, change.snapshotNode); - } else if (type == Change.CHILD_CHANGED && oldType == Change.CHILD_CHANGED) { - this.changeMap_[childKey] = Change.childChangedChange(childKey, change.snapshotNode, oldChange.oldSnap); + } else if ( + type == Change.CHILD_REMOVED && + oldType == Change.CHILD_CHANGED + ) { + this.changeMap_[childKey] = Change.childRemovedChange( + childKey, + oldChange.oldSnap + ); + } else if ( + type == Change.CHILD_CHANGED && + oldType == Change.CHILD_ADDED + ) { + this.changeMap_[childKey] = 
Change.childAddedChange( + childKey, + change.snapshotNode + ); + } else if ( + type == Change.CHILD_CHANGED && + oldType == Change.CHILD_CHANGED + ) { + this.changeMap_[childKey] = Change.childChangedChange( + childKey, + change.snapshotNode, + oldChange.oldSnap + ); } else { - throw assertionError('Illegal combination of changes: ' + change + ' occurred after ' + oldChange); + throw assertionError( + 'Illegal combination of changes: ' + + change + + ' occurred after ' + + oldChange + ); } } else { this.changeMap_[childKey] = change; } } - /** * @return {!Array.} */ @@ -63,5 +99,3 @@ export class ChildChangeAccumulator { return getValues(this.changeMap_); } } - - diff --git a/src/database/core/view/CompleteChildSource.ts b/src/database/core/view/CompleteChildSource.ts index a734c7718ae..601ca3512dc 100644 --- a/src/database/core/view/CompleteChildSource.ts +++ b/src/database/core/view/CompleteChildSource.ts @@ -41,10 +41,13 @@ export interface CompleteChildSource { * @param {boolean} reverse * @return {?NamedNode} */ - getChildAfterChild(index: Index, child: NamedNode, reverse: boolean): NamedNode | null; + getChildAfterChild( + index: Index, + child: NamedNode, + reverse: boolean + ): NamedNode | null; } - /** * An implementation of CompleteChildSource that never returns any additional children * @@ -53,7 +56,6 @@ export interface CompleteChildSource { * @implements CompleteChildSource */ export class NoCompleteChildSource_ implements CompleteChildSource { - /** * @inheritDoc */ @@ -64,12 +66,15 @@ export class NoCompleteChildSource_ implements CompleteChildSource { /** * @inheritDoc */ - getChildAfterChild(index?: Index, child?: NamedNode, reverse?: boolean): NamedNode | null { + getChildAfterChild( + index?: Index, + child?: NamedNode, + reverse?: boolean + ): NamedNode | null { return null; } } - /** * Singleton instance. 
* @const @@ -77,7 +82,6 @@ export class NoCompleteChildSource_ implements CompleteChildSource { */ export const NO_COMPLETE_CHILD_SOURCE = new NoCompleteChildSource_(); - /** * An implementation of CompleteChildSource that uses a WriteTree in addition to any other server data or * old event caches available to calculate complete children. @@ -91,10 +95,11 @@ export class WriteTreeCompleteChildSource implements CompleteChildSource { * @param {!ViewCache} viewCache_ * @param {?Node} optCompleteServerCache_ */ - constructor(private writes_: WriteTreeRef, - private viewCache_: ViewCache, - private optCompleteServerCache_: Node | null = null) { - } + constructor( + private writes_: WriteTreeRef, + private viewCache_: ViewCache, + private optCompleteServerCache_: Node | null = null + ) {} /** * @inheritDoc @@ -104,8 +109,10 @@ export class WriteTreeCompleteChildSource implements CompleteChildSource { if (node.isCompleteForChild(childKey)) { return node.getNode().getImmediateChild(childKey); } else { - const serverNode = this.optCompleteServerCache_ != null ? - new CacheNode(this.optCompleteServerCache_, true, false) : this.viewCache_.getServerCache(); + const serverNode = + this.optCompleteServerCache_ != null + ? new CacheNode(this.optCompleteServerCache_, true, false) + : this.viewCache_.getServerCache(); return this.writes_.calcCompleteChild(childKey, serverNode); } } @@ -113,10 +120,22 @@ export class WriteTreeCompleteChildSource implements CompleteChildSource { /** * @inheritDoc */ - getChildAfterChild(index: Index, child: NamedNode, reverse: boolean): NamedNode | null { - const completeServerData = this.optCompleteServerCache_ != null ? 
this.optCompleteServerCache_ : - this.viewCache_.getCompleteServerSnap(); - const nodes = this.writes_.calcIndexedSlice(completeServerData, child, 1, reverse, index); + getChildAfterChild( + index: Index, + child: NamedNode, + reverse: boolean + ): NamedNode | null { + const completeServerData = + this.optCompleteServerCache_ != null + ? this.optCompleteServerCache_ + : this.viewCache_.getCompleteServerSnap(); + const nodes = this.writes_.calcIndexedSlice( + completeServerData, + child, + 1, + reverse, + index + ); if (nodes.length === 0) { return null; } else { diff --git a/src/database/core/view/Event.ts b/src/database/core/view/Event.ts index b9be0a721b2..43f21ad7603 100644 --- a/src/database/core/view/Event.ts +++ b/src/database/core/view/Event.ts @@ -45,7 +45,6 @@ export interface Event { toString(): string; } - /** * Encapsulates the data needed to raise an event * @implements {Event} @@ -57,11 +56,17 @@ export class DataEvent implements Event { * @param {!DataSnapshot} snapshot The data backing the event * @param {?string=} prevName Optional, the name of the previous child for child_* events. 
*/ - constructor(public eventType: 'value' | ' child_added' | ' child_changed' | ' child_moved' | ' child_removed', - public eventRegistration: EventRegistration, - public snapshot: DataSnapshot, - public prevName?: string | null) { - } + constructor( + public eventType: + | 'value' + | ' child_added' + | ' child_changed' + | ' child_moved' + | ' child_removed', + public eventRegistration: EventRegistration, + public snapshot: DataSnapshot, + public prevName?: string | null + ) {} /** * @inheritDoc @@ -93,22 +98,27 @@ export class DataEvent implements Event { * @inheritDoc */ toString(): string { - return this.getPath().toString() + ':' + this.eventType + ':' + - stringify(this.snapshot.exportVal()); + return ( + this.getPath().toString() + + ':' + + this.eventType + + ':' + + stringify(this.snapshot.exportVal()) + ); } } - export class CancelEvent implements Event { /** * @param {EventRegistration} eventRegistration * @param {Error} error * @param {!Path} path */ - constructor(public eventRegistration: EventRegistration, - public error: Error, - public path: Path) { - } + constructor( + public eventRegistration: EventRegistration, + public error: Error, + public path: Path + ) {} /** * @inheritDoc diff --git a/src/database/core/view/EventGenerator.ts b/src/database/core/view/EventGenerator.ts index f05e1395f2a..1f89a349f21 100644 --- a/src/database/core/view/EventGenerator.ts +++ b/src/database/core/view/EventGenerator.ts @@ -58,22 +58,66 @@ export class EventGenerator { * @param {!Array.} eventRegistrations * @return {!Array.} */ - generateEventsForChanges(changes: Change[], eventCache: Node, eventRegistrations: EventRegistration[]): Event[] { + generateEventsForChanges( + changes: Change[], + eventCache: Node, + eventRegistrations: EventRegistration[] + ): Event[] { const events: Event[] = []; const moves: Change[] = []; - changes.forEach((change) => { - if (change.type === Change.CHILD_CHANGED && - this.index_.indexedValueChanged(change.oldSnap as Node, 
change.snapshotNode)) { - moves.push(Change.childMovedChange(change.childName as string, change.snapshotNode)); + changes.forEach(change => { + if ( + change.type === Change.CHILD_CHANGED && + this.index_.indexedValueChanged( + change.oldSnap as Node, + change.snapshotNode + ) + ) { + moves.push( + Change.childMovedChange( + change.childName as string, + change.snapshotNode + ) + ); } }); - this.generateEventsForType_(events, Change.CHILD_REMOVED, changes, eventRegistrations, eventCache); - this.generateEventsForType_(events, Change.CHILD_ADDED, changes, eventRegistrations, eventCache); - this.generateEventsForType_(events, Change.CHILD_MOVED, moves, eventRegistrations, eventCache); - this.generateEventsForType_(events, Change.CHILD_CHANGED, changes, eventRegistrations, eventCache); - this.generateEventsForType_(events, Change.VALUE, changes, eventRegistrations, eventCache); + this.generateEventsForType_( + events, + Change.CHILD_REMOVED, + changes, + eventRegistrations, + eventCache + ); + this.generateEventsForType_( + events, + Change.CHILD_ADDED, + changes, + eventRegistrations, + eventCache + ); + this.generateEventsForType_( + events, + Change.CHILD_MOVED, + moves, + eventRegistrations, + eventCache + ); + this.generateEventsForType_( + events, + Change.CHILD_CHANGED, + changes, + eventRegistrations, + eventCache + ); + this.generateEventsForType_( + events, + Change.VALUE, + changes, + eventRegistrations, + eventCache + ); return events; } @@ -88,16 +132,26 @@ export class EventGenerator { * @param {!Node} eventCache * @private */ - private generateEventsForType_(events: Event[], eventType: string, changes: Change[], - registrations: EventRegistration[], eventCache: Node) { - const filteredChanges = changes.filter((change) => change.type === eventType); + private generateEventsForType_( + events: Event[], + eventType: string, + changes: Change[], + registrations: EventRegistration[], + eventCache: Node + ) { + const filteredChanges = changes.filter(change => 
change.type === eventType); filteredChanges.sort(this.compareChanges_.bind(this)); - filteredChanges.forEach((change) => { - const materializedChange = this.materializeSingleChange_(change, eventCache); - registrations.forEach((registration) => { + filteredChanges.forEach(change => { + const materializedChange = this.materializeSingleChange_( + change, + eventCache + ); + registrations.forEach(registration => { if (registration.respondsTo(change.type)) { - events.push(registration.createEvent(materializedChange, this.query_)); + events.push( + registration.createEvent(materializedChange, this.query_) + ); } }); }); @@ -113,8 +167,12 @@ export class EventGenerator { if (change.type === 'value' || change.type === 'child_removed') { return change; } else { - change.prevName = eventCache.getPredecessorChildName(/** @type {!string} */ (change.childName), change.snapshotNode, - this.index_); + change.prevName = eventCache.getPredecessorChildName( + /** @type {!string} */ + change.childName, + change.snapshotNode, + this.index_ + ); return change; } } diff --git a/src/database/core/view/EventQueue.ts b/src/database/core/view/EventQueue.ts index 20c4e7af337..c7f13e0df8f 100644 --- a/src/database/core/view/EventQueue.ts +++ b/src/database/core/view/EventQueue.ts @@ -46,7 +46,6 @@ export class EventQueue { */ private recursionDepth_ = 0; - /** * @param {!Array.} eventDataList The new events to queue. 
*/ @@ -83,7 +82,9 @@ export class EventQueue { */ raiseEventsAtPath(path: Path, eventDataList: Event[]) { this.queueEvents(eventDataList); - this.raiseQueuedEventsMatchingPredicate_((eventPath: Path) => eventPath.equals(path)); + this.raiseQueuedEventsMatchingPredicate_((eventPath: Path) => + eventPath.equals(path) + ); } /** @@ -101,13 +102,15 @@ export class EventQueue { this.raiseQueuedEventsMatchingPredicate_((eventPath: Path) => { return eventPath.contains(changedPath) || changedPath.contains(eventPath); }); - }; + } /** * @param {!function(!Path):boolean} predicate * @private */ - private raiseQueuedEventsMatchingPredicate_(predicate: (path: Path) => boolean) { + private raiseQueuedEventsMatchingPredicate_( + predicate: (path: Path) => boolean + ) { this.recursionDepth_++; let sentAll = true; @@ -132,7 +135,6 @@ export class EventQueue { } } - /** * @param {!Path} path * @constructor @@ -144,8 +146,7 @@ export class EventList { */ private events_: Event[] = []; - constructor(private readonly path_: Path) { - } + constructor(private readonly path_: Path) {} /** * @param {!Event} eventData @@ -178,4 +179,3 @@ export class EventList { return this.path_; } } - diff --git a/src/database/core/view/EventRegistration.ts b/src/database/core/view/EventRegistration.ts index 37abc02e6a5..10880c91376 100644 --- a/src/database/core/view/EventRegistration.ts +++ b/src/database/core/view/EventRegistration.ts @@ -77,7 +77,6 @@ export interface EventRegistration { hasAnyCallback(): boolean; } - /** * Represents registration for 'value' events. 
*/ @@ -87,10 +86,11 @@ export class ValueEventRegistration implements EventRegistration { * @param {?function(Error)} cancelCallback_ * @param {?Object} context_ */ - constructor(private callback_: ((d: DataSnapshot) => void) | null, - private cancelCallback_: ((e: Error) => void) | null, - private context_: Object | null) { - } + constructor( + private callback_: ((d: DataSnapshot) => void) | null, + private cancelCallback_: ((e: Error) => void) | null, + private context_: Object | null + ) {} /** * @inheritDoc @@ -104,7 +104,11 @@ export class ValueEventRegistration implements EventRegistration { */ createEvent(change: Change, query: Query): DataEvent { const index = query.getQueryParams().getIndex(); - return new DataEvent('value', this, new DataSnapshot(change.snapshotNode, query.getRef(), index)); + return new DataEvent( + 'value', + this, + new DataSnapshot(change.snapshotNode, query.getRef(), index) + ); } /** @@ -113,15 +117,18 @@ export class ValueEventRegistration implements EventRegistration { getEventRunner(eventData: CancelEvent | DataEvent): () => void { const ctx = this.context_; if (eventData.getEventType() === 'cancel') { - assert(this.cancelCallback_, 'Raising a cancel event on a listener with no cancel callback'); + assert( + this.cancelCallback_, + 'Raising a cancel event on a listener with no cancel callback' + ); const cancelCB = this.cancelCallback_; - return function () { + return function() { // We know that error exists, we checked above that this is a cancel event cancelCB.call(ctx, (eventData as CancelEvent).error); }; } else { const cb = this.callback_; - return function () { + return function() { cb.call(ctx, (eventData as DataEvent).snapshot); }; } @@ -148,7 +155,9 @@ export class ValueEventRegistration implements EventRegistration { // If no callback specified, we consider it to match any callback. 
return true; } else { - return other.callback_ === this.callback_ && other.context_ === this.context_; + return ( + other.callback_ === this.callback_ && other.context_ === this.context_ + ); } } @@ -175,17 +184,22 @@ export class ChildEventRegistration implements EventRegistration { * @param {?function(Error)} cancelCallback_ * @param {Object=} context_ */ - constructor(private callbacks_: ({ [k: string]: (d: DataSnapshot, s?: string | null) => void }) | null, - private cancelCallback_: ((e: Error) => void) | null, - private context_?: Object) { - } + constructor( + private callbacks_: + | ({ [k: string]: (d: DataSnapshot, s?: string | null) => void }) + | null, + private cancelCallback_: ((e: Error) => void) | null, + private context_?: Object + ) {} /** * @inheritDoc */ respondsTo(eventType: string): boolean { - let eventToCheck = eventType === 'children_added' ? 'child_added' : eventType; - eventToCheck = eventToCheck === 'children_removed' ? 'child_removed' : eventToCheck; + let eventToCheck = + eventType === 'children_added' ? 'child_added' : eventType; + eventToCheck = + eventToCheck === 'children_removed' ? 
'child_removed' : eventToCheck; return contains(this.callbacks_, eventToCheck); } @@ -205,10 +219,14 @@ export class ChildEventRegistration implements EventRegistration { */ createEvent(change: Change, query: Query): DataEvent { assert(change.childName != null, 'Child events should have a childName.'); - const ref = query.getRef().child(/** @type {!string} */ (change.childName)); + const ref = query.getRef().child /** @type {!string} */(change.childName); const index = query.getQueryParams().getIndex(); - return new DataEvent(change.type as any, this, new DataSnapshot(change.snapshotNode, ref, index as any), - change.prevName); + return new DataEvent( + change.type as any, + this, + new DataSnapshot(change.snapshotNode, ref, index as any), + change.prevName + ); } /** @@ -217,17 +235,24 @@ export class ChildEventRegistration implements EventRegistration { getEventRunner(eventData: CancelEvent | DataEvent): () => void { const ctx = this.context_; if (eventData.getEventType() === 'cancel') { - assert(this.cancelCallback_, 'Raising a cancel event on a listener with no cancel callback'); + assert( + this.cancelCallback_, + 'Raising a cancel event on a listener with no cancel callback' + ); const cancelCB = this.cancelCallback_; - return function () { + return function() { // We know that error exists, we checked above that this is a cancel event cancelCB.call(ctx, (eventData as CancelEvent).error); }; } else { const cb = this.callbacks_[(eventData as DataEvent).eventType]; - return function () { - cb.call(ctx, (eventData as DataEvent).snapshot, (eventData as DataEvent).prevName); - } + return function() { + cb.call( + ctx, + (eventData as DataEvent).snapshot, + (eventData as DataEvent).prevName + ); + }; } } @@ -247,17 +272,20 @@ export class ChildEventRegistration implements EventRegistration { // If count is not 1, exact match across all if (otherCount === 1) { - const otherKey = /** @type {!string} */ (getAnyKey(other.callbacks_)); - const thisKey = /** @type 
{!string} */ (getAnyKey(this.callbacks_)); - return (thisKey === otherKey && ( - !other.callbacks_[otherKey] || + const otherKey /** @type {!string} */ = getAnyKey(other.callbacks_); + const thisKey /** @type {!string} */ = getAnyKey(this.callbacks_); + return ( + thisKey === otherKey && + (!other.callbacks_[otherKey] || !this.callbacks_[thisKey] || - other.callbacks_[otherKey] === this.callbacks_[thisKey] - ) + other.callbacks_[otherKey] === this.callbacks_[thisKey]) ); } else { // Exact match on each key. - return every(this.callbacks_, (eventType, cb) => other.callbacks_[eventType] === cb); + return every( + this.callbacks_, + (eventType, cb) => other.callbacks_[eventType] === cb + ); } } } @@ -270,7 +298,6 @@ export class ChildEventRegistration implements EventRegistration { * @inheritDoc */ hasAnyCallback(): boolean { - return (this.callbacks_ !== null); + return this.callbacks_ !== null; } } - diff --git a/src/database/core/view/QueryParams.ts b/src/database/core/view/QueryParams.ts index f17fa19efc9..d3f61782c9c 100644 --- a/src/database/core/view/QueryParams.ts +++ b/src/database/core/view/QueryParams.ts @@ -15,10 +15,7 @@ */ import { assert } from '../../../utils/assert'; -import { - MIN_NAME, - MAX_NAME -} from '../util/util'; +import { MIN_NAME, MAX_NAME } from '../util/util'; import { KEY_INDEX } from '../snap/indexes/KeyIndex'; import { PRIORITY_INDEX } from '../snap/indexes/PriorityIndex'; import { VALUE_INDEX } from '../snap/indexes/ValueIndex'; @@ -112,7 +109,9 @@ export class QueryParams { // anchor to the end. 
return this.startSet_; } else { - return this.viewFrom_ === QueryParams.WIRE_PROTOCOL_CONSTANTS_.VIEW_FROM_LEFT; + return ( + this.viewFrom_ === QueryParams.WIRE_PROTOCOL_CONSTANTS_.VIEW_FROM_LEFT + ); } } @@ -254,7 +253,7 @@ export class QueryParams { newParams.limit_ = newLimit; newParams.viewFrom_ = QueryParams.WIRE_PROTOCOL_CONSTANTS_.VIEW_FROM_RIGHT; return newParams; - }; + } /** * @param {*} indexValue @@ -298,7 +297,7 @@ export class QueryParams { newParams.indexEndName_ = ''; } return newParams; - }; + } /** * @param {!Index} index @@ -374,7 +373,6 @@ export class QueryParams { } } - /** * Returns a set of REST query string parameters representing this query. * diff --git a/src/database/core/view/View.ts b/src/database/core/view/View.ts index f3fb5b10f6b..aa8fc214a92 100644 --- a/src/database/core/view/View.ts +++ b/src/database/core/view/View.ts @@ -68,12 +68,26 @@ export class View { const initialEventCache = initialViewCache.getEventCache(); // Don't filter server node with other filter than index, wait for tagged listen - const serverSnap = indexFilter.updateFullNode(ChildrenNode.EMPTY_NODE, initialServerCache.getNode(), null); - const eventSnap = filter.updateFullNode(ChildrenNode.EMPTY_NODE, initialEventCache.getNode(), null); - const newServerCache = new CacheNode(serverSnap, initialServerCache.isFullyInitialized(), - indexFilter.filtersNodes()); - const newEventCache = new CacheNode(eventSnap, initialEventCache.isFullyInitialized(), - filter.filtersNodes()); + const serverSnap = indexFilter.updateFullNode( + ChildrenNode.EMPTY_NODE, + initialServerCache.getNode(), + null + ); + const eventSnap = filter.updateFullNode( + ChildrenNode.EMPTY_NODE, + initialEventCache.getNode(), + null + ); + const newServerCache = new CacheNode( + serverSnap, + initialServerCache.isFullyInitialized(), + indexFilter.filtersNodes() + ); + const newEventCache = new CacheNode( + eventSnap, + initialEventCache.isFullyInitialized(), + filter.filtersNodes() + ); /** * @type 
{!ViewCache} @@ -86,21 +100,21 @@ export class View { * @private */ this.eventGenerator_ = new EventGenerator(this.query_); - }; + } /** * @return {!Query} */ getQuery(): Query { return this.query_; - }; + } /** * @return {?Node} */ getServerCache(): Node | null { return this.viewCache_.getServerCache().getNode(); - }; + } /** * @param {!Path} path @@ -111,40 +125,48 @@ export class View { if (cache) { // If this isn't a "loadsAllData" view, then cache isn't actually a complete cache and // we need to see if it contains the child we're interested in. - if (this.query_.getQueryParams().loadsAllData() || - (!path.isEmpty() && !cache.getImmediateChild(path.getFront()).isEmpty())) { + if ( + this.query_.getQueryParams().loadsAllData() || + (!path.isEmpty() && !cache.getImmediateChild(path.getFront()).isEmpty()) + ) { return cache.getChild(path); } } return null; - }; + } /** * @return {boolean} */ isEmpty(): boolean { return this.eventRegistrations_.length === 0; - }; + } /** * @param {!EventRegistration} eventRegistration */ addEventRegistration(eventRegistration: EventRegistration) { this.eventRegistrations_.push(eventRegistration); - }; + } /** * @param {?EventRegistration} eventRegistration If null, remove all callbacks. * @param {Error=} cancelError If a cancelError is provided, appropriate cancel events will be returned. * @return {!Array.} Cancel events, if cancelError was provided. */ - removeEventRegistration(eventRegistration: EventRegistration | null, cancelError?: Error): Event[] { + removeEventRegistration( + eventRegistration: EventRegistration | null, + cancelError?: Error + ): Event[] { const cancelEvents: CancelEvent[] = []; if (cancelError) { - assert(eventRegistration == null, 'A cancel should cancel all event registrations.'); + assert( + eventRegistration == null, + 'A cancel should cancel all event registrations.' 
+ ); const path = this.query_.path; - this.eventRegistrations_.forEach(function (registration) { - cancelError = /** @type {!Error} */ (cancelError); + this.eventRegistrations_.forEach(function(registration) { + cancelError /** @type {!Error} */ = cancelError; const maybeEvent = registration.createCancelEvent(cancelError, path); if (maybeEvent) { cancelEvents.push(maybeEvent); @@ -169,7 +191,7 @@ export class View { this.eventRegistrations_ = []; } return cancelEvents; - }; + } /** * Applies the given Operation, updates our cache, and returns the appropriate events. @@ -179,28 +201,48 @@ export class View { * @param {?Node} completeServerCache * @return {!Array.} */ - applyOperation(operation: Operation, writesCache: WriteTreeRef, completeServerCache: Node | null): Event[] { - if (operation.type === OperationType.MERGE && - operation.source.queryId !== null) { - - assert(this.viewCache_.getCompleteServerSnap(), - 'We should always have a full cache before handling merges'); - assert(this.viewCache_.getCompleteEventSnap(), - 'Missing event cache, even though we have a server cache'); + applyOperation( + operation: Operation, + writesCache: WriteTreeRef, + completeServerCache: Node | null + ): Event[] { + if ( + operation.type === OperationType.MERGE && + operation.source.queryId !== null + ) { + assert( + this.viewCache_.getCompleteServerSnap(), + 'We should always have a full cache before handling merges' + ); + assert( + this.viewCache_.getCompleteEventSnap(), + 'Missing event cache, even though we have a server cache' + ); } const oldViewCache = this.viewCache_; - const result = this.processor_.applyOperation(oldViewCache, operation, writesCache, completeServerCache); + const result = this.processor_.applyOperation( + oldViewCache, + operation, + writesCache, + completeServerCache + ); this.processor_.assertIndexed(result.viewCache); - assert(result.viewCache.getServerCache().isFullyInitialized() || - !oldViewCache.getServerCache().isFullyInitialized(), - 'Once a 
server snap is complete, it should never go back'); + assert( + result.viewCache.getServerCache().isFullyInitialized() || + !oldViewCache.getServerCache().isFullyInitialized(), + 'Once a server snap is complete, it should never go back' + ); this.viewCache_ = result.viewCache; - return this.generateEventsForChanges_(result.changes, result.viewCache.getEventCache().getNode(), null); - }; + return this.generateEventsForChanges_( + result.changes, + result.viewCache.getEventCache().getNode(), + null + ); + } /** * @param {!EventRegistration} registration @@ -211,15 +253,19 @@ export class View { const initialChanges: Change[] = []; if (!eventSnap.getNode().isLeafNode()) { const eventNode = eventSnap.getNode() as ChildrenNode; - eventNode.forEachChild(PRIORITY_INDEX, function (key, childNode) { + eventNode.forEachChild(PRIORITY_INDEX, function(key, childNode) { initialChanges.push(Change.childAddedChange(key, childNode)); }); } if (eventSnap.isFullyInitialized()) { initialChanges.push(Change.valueChange(eventSnap.getNode())); } - return this.generateEventsForChanges_(initialChanges, eventSnap.getNode(), registration); - }; + return this.generateEventsForChanges_( + initialChanges, + eventSnap.getNode(), + registration + ); + } /** * @private @@ -228,9 +274,18 @@ export class View { * @param {EventRegistration=} eventRegistration * @return {!Array.} */ - generateEventsForChanges_(changes: Change[], eventCache: Node, eventRegistration?: EventRegistration): Event[] { - const registrations = eventRegistration ? [eventRegistration] : this.eventRegistrations_; - return this.eventGenerator_.generateEventsForChanges(changes, eventCache, registrations); - }; + generateEventsForChanges_( + changes: Change[], + eventCache: Node, + eventRegistration?: EventRegistration + ): Event[] { + const registrations = eventRegistration + ? 
[eventRegistration] + : this.eventRegistrations_; + return this.eventGenerator_.generateEventsForChanges( + changes, + eventCache, + registrations + ); + } } - diff --git a/src/database/core/view/ViewCache.ts b/src/database/core/view/ViewCache.ts index 808cd07487b..7b00d5b79bc 100644 --- a/src/database/core/view/ViewCache.ts +++ b/src/database/core/view/ViewCache.ts @@ -31,17 +31,26 @@ export class ViewCache { * @param {!CacheNode} eventCache_ * @param {!CacheNode} serverCache_ */ - constructor(private readonly eventCache_: CacheNode, - private readonly serverCache_: CacheNode) { - } + constructor( + private readonly eventCache_: CacheNode, + private readonly serverCache_: CacheNode + ) {} /** * @const * @type {ViewCache} */ static Empty = new ViewCache( - new CacheNode(ChildrenNode.EMPTY_NODE, /*fullyInitialized=*/false, /*filtered=*/false), - new CacheNode(ChildrenNode.EMPTY_NODE, /*fullyInitialized=*/false, /*filtered=*/false) + new CacheNode( + ChildrenNode.EMPTY_NODE, + /*fullyInitialized=*/ false, + /*filtered=*/ false + ), + new CacheNode( + ChildrenNode.EMPTY_NODE, + /*fullyInitialized=*/ false, + /*filtered=*/ false + ) ); /** @@ -50,8 +59,15 @@ export class ViewCache { * @param {boolean} filtered * @return {!ViewCache} */ - updateEventSnap(eventSnap: Node, complete: boolean, filtered: boolean): ViewCache { - return new ViewCache(new CacheNode(eventSnap, complete, filtered), this.serverCache_); + updateEventSnap( + eventSnap: Node, + complete: boolean, + filtered: boolean + ): ViewCache { + return new ViewCache( + new CacheNode(eventSnap, complete, filtered), + this.serverCache_ + ); } /** @@ -60,8 +76,15 @@ export class ViewCache { * @param {boolean} filtered * @return {!ViewCache} */ - updateServerSnap(serverSnap: Node, complete: boolean, filtered: boolean): ViewCache { - return new ViewCache(this.eventCache_, new CacheNode(serverSnap, complete, filtered)); + updateServerSnap( + serverSnap: Node, + complete: boolean, + filtered: boolean + ): ViewCache { 
+ return new ViewCache( + this.eventCache_, + new CacheNode(serverSnap, complete, filtered) + ); } /** @@ -75,7 +98,9 @@ export class ViewCache { * @return {?Node} */ getCompleteEventSnap(): Node | null { - return (this.eventCache_.isFullyInitialized()) ? this.eventCache_.getNode() : null; + return this.eventCache_.isFullyInitialized() + ? this.eventCache_.getNode() + : null; } /** @@ -89,7 +114,8 @@ export class ViewCache { * @return {?Node} */ getCompleteServerSnap(): Node | null { - return this.serverCache_.isFullyInitialized() ? this.serverCache_.getNode() : null; + return this.serverCache_.isFullyInitialized() + ? this.serverCache_.getNode() + : null; } } - diff --git a/src/database/core/view/ViewProcessor.ts b/src/database/core/view/ViewProcessor.ts index 6b1813c4535..8e671e21888 100644 --- a/src/database/core/view/ViewProcessor.ts +++ b/src/database/core/view/ViewProcessor.ts @@ -22,7 +22,11 @@ import { ChildrenNode } from '../snap/ChildrenNode'; import { KEY_INDEX } from '../snap/indexes/KeyIndex'; import { ImmutableTree } from '../util/ImmutableTree'; import { Path } from '../util/Path'; -import { WriteTreeCompleteChildSource, NO_COMPLETE_CHILD_SOURCE, CompleteChildSource } from './CompleteChildSource'; +import { + WriteTreeCompleteChildSource, + NO_COMPLETE_CHILD_SOURCE, + CompleteChildSource +} from './CompleteChildSource'; import { ViewCache } from './ViewCache'; import { NodeFilter } from './filter/NodeFilter'; import { WriteTreeRef } from '../WriteTree'; @@ -40,9 +44,10 @@ export class ProcessorResult { * @param {!ViewCache} viewCache * @param {!Array.} changes */ - constructor(public readonly viewCache: ViewCache, - public readonly changes: Change[]) { - } + constructor( + public readonly viewCache: ViewCache, + public readonly changes: Change[] + ) {} } /** @@ -52,16 +57,20 @@ export class ViewProcessor { /** * @param {!NodeFilter} filter_ */ - constructor(private readonly filter_: NodeFilter) { - } + constructor(private readonly filter_: 
NodeFilter) {} /** * @param {!ViewCache} viewCache */ assertIndexed(viewCache: ViewCache) { - assert(viewCache.getEventCache().getNode().isIndexed(this.filter_.getIndex()), 'Event snap not indexed'); - assert(viewCache.getServerCache().getNode().isIndexed(this.filter_.getIndex()), - 'Server snap not indexed'); + assert( + viewCache.getEventCache().getNode().isIndexed(this.filter_.getIndex()), + 'Event snap not indexed' + ); + assert( + viewCache.getServerCache().getNode().isIndexed(this.filter_.getIndex()), + 'Server snap not indexed' + ); } /** @@ -71,47 +80,97 @@ export class ViewProcessor { * @param {?Node} completeCache * @return {!ProcessorResult} */ - applyOperation(oldViewCache: ViewCache, operation: Operation, - writesCache: WriteTreeRef, completeCache: Node | null): ProcessorResult { + applyOperation( + oldViewCache: ViewCache, + operation: Operation, + writesCache: WriteTreeRef, + completeCache: Node | null + ): ProcessorResult { const accumulator = new ChildChangeAccumulator(); let newViewCache, filterServerNode; if (operation.type === OperationType.OVERWRITE) { const overwrite = operation as Overwrite; if (overwrite.source.fromUser) { - newViewCache = this.applyUserOverwrite_(oldViewCache, overwrite.path, overwrite.snap, - writesCache, completeCache, accumulator); + newViewCache = this.applyUserOverwrite_( + oldViewCache, + overwrite.path, + overwrite.snap, + writesCache, + completeCache, + accumulator + ); } else { assert(overwrite.source.fromServer, 'Unknown source.'); // We filter the node if it's a tagged update or the node has been previously filtered and the // update is not at the root in which case it is ok (and necessary) to mark the node unfiltered // again - filterServerNode = overwrite.source.tagged || - (oldViewCache.getServerCache().isFiltered() && !overwrite.path.isEmpty()); - newViewCache = this.applyServerOverwrite_(oldViewCache, overwrite.path, overwrite.snap, writesCache, - completeCache, filterServerNode, accumulator); + 
filterServerNode = + overwrite.source.tagged || + (oldViewCache.getServerCache().isFiltered() && + !overwrite.path.isEmpty()); + newViewCache = this.applyServerOverwrite_( + oldViewCache, + overwrite.path, + overwrite.snap, + writesCache, + completeCache, + filterServerNode, + accumulator + ); } } else if (operation.type === OperationType.MERGE) { const merge = operation as Merge; if (merge.source.fromUser) { - newViewCache = this.applyUserMerge_(oldViewCache, merge.path, merge.children, writesCache, - completeCache, accumulator); + newViewCache = this.applyUserMerge_( + oldViewCache, + merge.path, + merge.children, + writesCache, + completeCache, + accumulator + ); } else { assert(merge.source.fromServer, 'Unknown source.'); // We filter the node if it's a tagged update or the node has been previously filtered - filterServerNode = merge.source.tagged || oldViewCache.getServerCache().isFiltered(); - newViewCache = this.applyServerMerge_(oldViewCache, merge.path, merge.children, writesCache, completeCache, - filterServerNode, accumulator); + filterServerNode = + merge.source.tagged || oldViewCache.getServerCache().isFiltered(); + newViewCache = this.applyServerMerge_( + oldViewCache, + merge.path, + merge.children, + writesCache, + completeCache, + filterServerNode, + accumulator + ); } } else if (operation.type === OperationType.ACK_USER_WRITE) { const ackUserWrite = operation as AckUserWrite; if (!ackUserWrite.revert) { - newViewCache = this.ackUserWrite_(oldViewCache, ackUserWrite.path, ackUserWrite.affectedTree, writesCache, - completeCache, accumulator); + newViewCache = this.ackUserWrite_( + oldViewCache, + ackUserWrite.path, + ackUserWrite.affectedTree, + writesCache, + completeCache, + accumulator + ); } else { - newViewCache = this.revertUserWrite_(oldViewCache, ackUserWrite.path, writesCache, completeCache, accumulator); + newViewCache = this.revertUserWrite_( + oldViewCache, + ackUserWrite.path, + writesCache, + completeCache, + accumulator + ); } } else 
if (operation.type === OperationType.LISTEN_COMPLETE) { - newViewCache = this.listenComplete_(oldViewCache, operation.path, writesCache, accumulator); + newViewCache = this.listenComplete_( + oldViewCache, + operation.path, + writesCache, + accumulator + ); } else { throw assertionError('Unknown operation type: ' + operation.type); } @@ -126,17 +185,28 @@ export class ViewProcessor { * @param {!Array.} accumulator * @private */ - private static maybeAddValueEvent_(oldViewCache: ViewCache, newViewCache: ViewCache, accumulator: Change[]) { + private static maybeAddValueEvent_( + oldViewCache: ViewCache, + newViewCache: ViewCache, + accumulator: Change[] + ) { const eventSnap = newViewCache.getEventCache(); if (eventSnap.isFullyInitialized()) { - const isLeafOrEmpty = eventSnap.getNode().isLeafNode() || eventSnap.getNode().isEmpty(); + const isLeafOrEmpty = + eventSnap.getNode().isLeafNode() || eventSnap.getNode().isEmpty(); const oldCompleteSnap = oldViewCache.getCompleteEventSnap(); - if (accumulator.length > 0 || + if ( + accumulator.length > 0 || !oldViewCache.getEventCache().isFullyInitialized() || - (isLeafOrEmpty && !eventSnap.getNode().equals(/** @type {!Node} */ (oldCompleteSnap))) || - !eventSnap.getNode().getPriority().equals(oldCompleteSnap.getPriority())) { - accumulator.push(Change.valueChange( - /** @type {!Node} */ (newViewCache.getCompleteEventSnap()))); + (isLeafOrEmpty && + !eventSnap.getNode().equals /** @type {!Node} */(oldCompleteSnap)) || + !eventSnap.getNode().getPriority().equals(oldCompleteSnap.getPriority()) + ) { + accumulator.push( + Change.valueChange( + /** @type {!Node} */ newViewCache.getCompleteEventSnap() + ) + ); } } } @@ -150,9 +220,13 @@ export class ViewProcessor { * @return {!ViewCache} * @private */ - private generateEventCacheAfterServerEvent_(viewCache: ViewCache, changePath: Path, - writesCache: WriteTreeRef, source: CompleteChildSource, - accumulator: ChildChangeAccumulator): ViewCache { + private 
generateEventCacheAfterServerEvent_( + viewCache: ViewCache, + changePath: Path, + writesCache: WriteTreeRef, + source: CompleteChildSource, + accumulator: ChildChangeAccumulator + ): ViewCache { const oldEventSnap = viewCache.getEventCache(); if (writesCache.shadowingWrite(changePath) != null) { // we have a shadowing write, ignore changes @@ -161,32 +235,57 @@ export class ViewProcessor { let newEventCache, serverNode; if (changePath.isEmpty()) { // TODO: figure out how this plays with "sliding ack windows" - assert(viewCache.getServerCache().isFullyInitialized(), - 'If change path is empty, we must have complete server data'); + assert( + viewCache.getServerCache().isFullyInitialized(), + 'If change path is empty, we must have complete server data' + ); if (viewCache.getServerCache().isFiltered()) { // We need to special case this, because we need to only apply writes to complete children, or // we might end up raising events for incomplete children. If the server data is filtered deep // writes cannot be guaranteed to be complete const serverCache = viewCache.getCompleteServerSnap(); - const completeChildren = (serverCache instanceof ChildrenNode) ? serverCache : - ChildrenNode.EMPTY_NODE; - const completeEventChildren = writesCache.calcCompleteEventChildren(completeChildren); - newEventCache = this.filter_.updateFullNode(viewCache.getEventCache().getNode(), completeEventChildren, - accumulator); + const completeChildren = + serverCache instanceof ChildrenNode + ? 
serverCache + : ChildrenNode.EMPTY_NODE; + const completeEventChildren = writesCache.calcCompleteEventChildren( + completeChildren + ); + newEventCache = this.filter_.updateFullNode( + viewCache.getEventCache().getNode(), + completeEventChildren, + accumulator + ); } else { - const completeNode = writesCache.calcCompleteEventCache(viewCache.getCompleteServerSnap()); - newEventCache = this.filter_.updateFullNode(viewCache.getEventCache().getNode(), completeNode, accumulator); + const completeNode = writesCache.calcCompleteEventCache( + viewCache.getCompleteServerSnap() + ); + newEventCache = this.filter_.updateFullNode( + viewCache.getEventCache().getNode(), + completeNode, + accumulator + ); } } else { const childKey = changePath.getFront(); if (childKey == '.priority') { - assert(changePath.getLength() == 1, 'Can\'t have a priority with additional path components'); + assert( + changePath.getLength() == 1, + "Can't have a priority with additional path components" + ); const oldEventNode = oldEventSnap.getNode(); serverNode = viewCache.getServerCache().getNode(); // we might have overwrites for this priority - const updatedPriority = writesCache.calcEventCacheAfterServerOverwrite(changePath, oldEventNode, serverNode); + const updatedPriority = writesCache.calcEventCacheAfterServerOverwrite( + changePath, + oldEventNode, + serverNode + ); if (updatedPriority != null) { - newEventCache = this.filter_.updatePriority(oldEventNode, updatedPriority); + newEventCache = this.filter_.updatePriority( + oldEventNode, + updatedPriority + ); } else { // priority didn't change, keep old node newEventCache = oldEventSnap.getNode(); @@ -197,29 +296,48 @@ export class ViewProcessor { let newEventChild; if (oldEventSnap.isCompleteForChild(childKey)) { serverNode = viewCache.getServerCache().getNode(); - const eventChildUpdate = writesCache.calcEventCacheAfterServerOverwrite(changePath, oldEventSnap.getNode(), - serverNode); + const eventChildUpdate = 
writesCache.calcEventCacheAfterServerOverwrite( + changePath, + oldEventSnap.getNode(), + serverNode + ); if (eventChildUpdate != null) { - newEventChild = oldEventSnap.getNode().getImmediateChild(childKey).updateChild(childChangePath, - eventChildUpdate); + newEventChild = oldEventSnap + .getNode() + .getImmediateChild(childKey) + .updateChild(childChangePath, eventChildUpdate); } else { // Nothing changed, just keep the old child - newEventChild = oldEventSnap.getNode().getImmediateChild(childKey); + newEventChild = oldEventSnap + .getNode() + .getImmediateChild(childKey); } } else { - newEventChild = writesCache.calcCompleteChild(childKey, viewCache.getServerCache()); + newEventChild = writesCache.calcCompleteChild( + childKey, + viewCache.getServerCache() + ); } if (newEventChild != null) { - newEventCache = this.filter_.updateChild(oldEventSnap.getNode(), childKey, newEventChild, childChangePath, - source, accumulator); + newEventCache = this.filter_.updateChild( + oldEventSnap.getNode(), + childKey, + newEventChild, + childChangePath, + source, + accumulator + ); } else { // no complete child available or no change newEventCache = oldEventSnap.getNode(); } } } - return viewCache.updateEventSnap(newEventCache, oldEventSnap.isFullyInitialized() || changePath.isEmpty(), - this.filter_.filtersNodes()); + return viewCache.updateEventSnap( + newEventCache, + oldEventSnap.isFullyInitialized() || changePath.isEmpty(), + this.filter_.filtersNodes() + ); } } @@ -234,21 +352,42 @@ export class ViewProcessor { * @return {!ViewCache} * @private */ - applyServerOverwrite_(oldViewCache: ViewCache, changePath: Path, changedSnap: Node, - writesCache: WriteTreeRef, completeCache: Node | null, filterServerNode: boolean, - accumulator: ChildChangeAccumulator): ViewCache { + applyServerOverwrite_( + oldViewCache: ViewCache, + changePath: Path, + changedSnap: Node, + writesCache: WriteTreeRef, + completeCache: Node | null, + filterServerNode: boolean, + accumulator: 
ChildChangeAccumulator + ): ViewCache { const oldServerSnap = oldViewCache.getServerCache(); let newServerCache; - const serverFilter = filterServerNode ? this.filter_ : this.filter_.getIndexedFilter(); + const serverFilter = filterServerNode + ? this.filter_ + : this.filter_.getIndexedFilter(); if (changePath.isEmpty()) { - newServerCache = serverFilter.updateFullNode(oldServerSnap.getNode(), changedSnap, null); + newServerCache = serverFilter.updateFullNode( + oldServerSnap.getNode(), + changedSnap, + null + ); } else if (serverFilter.filtersNodes() && !oldServerSnap.isFiltered()) { // we want to filter the server node, but we didn't filter the server node yet, so simulate a full update - const newServerNode = oldServerSnap.getNode().updateChild(changePath, changedSnap); - newServerCache = serverFilter.updateFullNode(oldServerSnap.getNode(), newServerNode, null); + const newServerNode = oldServerSnap + .getNode() + .updateChild(changePath, changedSnap); + newServerCache = serverFilter.updateFullNode( + oldServerSnap.getNode(), + newServerNode, + null + ); } else { const childKey = changePath.getFront(); - if (!oldServerSnap.isCompleteForPath(changePath) && changePath.getLength() > 1) { + if ( + !oldServerSnap.isCompleteForPath(changePath) && + changePath.getLength() > 1 + ) { // We don't update incomplete nodes with updates intended for other listeners return oldViewCache; } @@ -256,16 +395,38 @@ export class ViewProcessor { const childNode = oldServerSnap.getNode().getImmediateChild(childKey); const newChildNode = childNode.updateChild(childChangePath, changedSnap); if (childKey == '.priority') { - newServerCache = serverFilter.updatePriority(oldServerSnap.getNode(), newChildNode); + newServerCache = serverFilter.updatePriority( + oldServerSnap.getNode(), + newChildNode + ); } else { - newServerCache = serverFilter.updateChild(oldServerSnap.getNode(), childKey, newChildNode, childChangePath, - NO_COMPLETE_CHILD_SOURCE, null); + newServerCache = 
serverFilter.updateChild( + oldServerSnap.getNode(), + childKey, + newChildNode, + childChangePath, + NO_COMPLETE_CHILD_SOURCE, + null + ); } } - const newViewCache = oldViewCache.updateServerSnap(newServerCache, - oldServerSnap.isFullyInitialized() || changePath.isEmpty(), serverFilter.filtersNodes()); - const source = new WriteTreeCompleteChildSource(writesCache, newViewCache, completeCache); - return this.generateEventCacheAfterServerEvent_(newViewCache, changePath, writesCache, source, accumulator); + const newViewCache = oldViewCache.updateServerSnap( + newServerCache, + oldServerSnap.isFullyInitialized() || changePath.isEmpty(), + serverFilter.filtersNodes() + ); + const source = new WriteTreeCompleteChildSource( + writesCache, + newViewCache, + completeCache + ); + return this.generateEventCacheAfterServerEvent_( + newViewCache, + changePath, + writesCache, + source, + accumulator + ); } /** @@ -278,20 +439,44 @@ export class ViewProcessor { * @return {!ViewCache} * @private */ - applyUserOverwrite_(oldViewCache: ViewCache, changePath: Path, changedSnap: Node, writesCache: WriteTreeRef, - completeCache: Node | null, accumulator: ChildChangeAccumulator): ViewCache { + applyUserOverwrite_( + oldViewCache: ViewCache, + changePath: Path, + changedSnap: Node, + writesCache: WriteTreeRef, + completeCache: Node | null, + accumulator: ChildChangeAccumulator + ): ViewCache { const oldEventSnap = oldViewCache.getEventCache(); let newViewCache, newEventCache; - const source = new WriteTreeCompleteChildSource(writesCache, oldViewCache, completeCache); + const source = new WriteTreeCompleteChildSource( + writesCache, + oldViewCache, + completeCache + ); if (changePath.isEmpty()) { - newEventCache = this.filter_.updateFullNode(oldViewCache.getEventCache().getNode(), changedSnap, accumulator); - newViewCache = oldViewCache.updateEventSnap(newEventCache, true, this.filter_.filtersNodes()); + newEventCache = this.filter_.updateFullNode( + 
oldViewCache.getEventCache().getNode(), + changedSnap, + accumulator + ); + newViewCache = oldViewCache.updateEventSnap( + newEventCache, + true, + this.filter_.filtersNodes() + ); } else { const childKey = changePath.getFront(); if (childKey === '.priority') { - newEventCache = this.filter_.updatePriority(oldViewCache.getEventCache().getNode(), changedSnap); - newViewCache = oldViewCache.updateEventSnap(newEventCache, oldEventSnap.isFullyInitialized(), - oldEventSnap.isFiltered()); + newEventCache = this.filter_.updatePriority( + oldViewCache.getEventCache().getNode(), + changedSnap + ); + newViewCache = oldViewCache.updateEventSnap( + newEventCache, + oldEventSnap.isFullyInitialized(), + oldEventSnap.isFiltered() + ); } else { const childChangePath = changePath.popFront(); const oldChild = oldEventSnap.getNode().getImmediateChild(childKey); @@ -302,8 +487,10 @@ export class ViewProcessor { } else { const childNode = source.getCompleteChild(childKey); if (childNode != null) { - if (childChangePath.getBack() === '.priority' && - childNode.getChild(childChangePath.parent()).isEmpty()) { + if ( + childChangePath.getBack() === '.priority' && + childNode.getChild(childChangePath.parent()).isEmpty() + ) { // This is a priority update on an empty node. 
If this node exists on the server, the // server will send down the priority in the update, so ignore for now newChild = childNode; @@ -316,10 +503,19 @@ export class ViewProcessor { } } if (!oldChild.equals(newChild)) { - const newEventSnap = this.filter_.updateChild(oldEventSnap.getNode(), childKey, newChild, childChangePath, - source, accumulator); - newViewCache = oldViewCache.updateEventSnap(newEventSnap, oldEventSnap.isFullyInitialized(), - this.filter_.filtersNodes()); + const newEventSnap = this.filter_.updateChild( + oldEventSnap.getNode(), + childKey, + newChild, + childChangePath, + source, + accumulator + ); + newViewCache = oldViewCache.updateEventSnap( + newEventSnap, + oldEventSnap.isFullyInitialized(), + this.filter_.filtersNodes() + ); } else { newViewCache = oldViewCache; } @@ -334,7 +530,10 @@ export class ViewProcessor { * @return {boolean} * @private */ - private static cacheHasChild_(viewCache: ViewCache, childKey: string): boolean { + private static cacheHasChild_( + viewCache: ViewCache, + childKey: string + ): boolean { return viewCache.getEventCache().isCompleteForChild(childKey); } @@ -348,8 +547,14 @@ export class ViewProcessor { * @return {!ViewCache} * @private */ - private applyUserMerge_(viewCache: ViewCache, path: Path, changedChildren: ImmutableTree, writesCache: WriteTreeRef, - serverCache: Node | null, accumulator: ChildChangeAccumulator): ViewCache { + private applyUserMerge_( + viewCache: ViewCache, + path: Path, + changedChildren: ImmutableTree, + writesCache: WriteTreeRef, + serverCache: Node | null, + accumulator: ChildChangeAccumulator + ): ViewCache { // HACK: In the case of a limit query, there may be some changes that bump things out of the // window leaving room for new items. It's important we process these changes first, so we // iterate the changes twice, first processing any that affect items currently in view. 
@@ -360,16 +565,28 @@ export class ViewProcessor { changedChildren.foreach((relativePath, childNode) => { const writePath = path.child(relativePath); if (ViewProcessor.cacheHasChild_(viewCache, writePath.getFront())) { - curViewCache = this.applyUserOverwrite_(curViewCache, writePath, childNode, writesCache, - serverCache, accumulator); + curViewCache = this.applyUserOverwrite_( + curViewCache, + writePath, + childNode, + writesCache, + serverCache, + accumulator + ); } }); changedChildren.foreach((relativePath, childNode) => { const writePath = path.child(relativePath); if (!ViewProcessor.cacheHasChild_(viewCache, writePath.getFront())) { - curViewCache = this.applyUserOverwrite_(curViewCache, writePath, childNode, writesCache, - serverCache, accumulator); + curViewCache = this.applyUserOverwrite_( + curViewCache, + writePath, + childNode, + writesCache, + serverCache, + accumulator + ); } }); @@ -383,7 +600,7 @@ export class ViewProcessor { * @private */ private applyMerge_(node: Node, merge: ImmutableTree): Node { - merge.foreach(function (relativePath, childNode) { + merge.foreach(function(relativePath, childNode) { node = node.updateChild(relativePath, childNode); }); return node; @@ -400,12 +617,21 @@ export class ViewProcessor { * @return {!ViewCache} * @private */ - private applyServerMerge_(viewCache: ViewCache, path: Path, changedChildren: ImmutableTree, - writesCache: WriteTreeRef, serverCache: Node | null, filterServerNode: boolean, - accumulator: ChildChangeAccumulator): ViewCache { + private applyServerMerge_( + viewCache: ViewCache, + path: Path, + changedChildren: ImmutableTree, + writesCache: WriteTreeRef, + serverCache: Node | null, + filterServerNode: boolean, + accumulator: ChildChangeAccumulator + ): ViewCache { // If we don't have a cache yet, this merge was intended for a previously listen in the same location. Ignore it and // wait for the complete data update coming soon. 
- if (viewCache.getServerCache().getNode().isEmpty() && !viewCache.getServerCache().isFullyInitialized()) { + if ( + viewCache.getServerCache().getNode().isEmpty() && + !viewCache.getServerCache().isFullyInitialized() + ) { return viewCache; } @@ -425,20 +651,41 @@ export class ViewProcessor { const serverNode = viewCache.getServerCache().getNode(); viewMergeTree.children.inorderTraversal((childKey, childTree) => { if (serverNode.hasChild(childKey)) { - const serverChild = viewCache.getServerCache().getNode().getImmediateChild(childKey); + const serverChild = viewCache + .getServerCache() + .getNode() + .getImmediateChild(childKey); const newChild = this.applyMerge_(serverChild, childTree); - curViewCache = this.applyServerOverwrite_(curViewCache, new Path(childKey), newChild, - writesCache, serverCache, filterServerNode, accumulator); + curViewCache = this.applyServerOverwrite_( + curViewCache, + new Path(childKey), + newChild, + writesCache, + serverCache, + filterServerNode, + accumulator + ); } }); viewMergeTree.children.inorderTraversal((childKey, childMergeTree) => { - const isUnknownDeepMerge = !viewCache.getServerCache().isCompleteForChild(childKey) - && (childMergeTree.value == null); + const isUnknownDeepMerge = + !viewCache.getServerCache().isCompleteForChild(childKey) && + childMergeTree.value == null; if (!serverNode.hasChild(childKey) && !isUnknownDeepMerge) { - const serverChild = viewCache.getServerCache().getNode().getImmediateChild(childKey); + const serverChild = viewCache + .getServerCache() + .getNode() + .getImmediateChild(childKey); const newChild = this.applyMerge_(serverChild, childMergeTree); - curViewCache = this.applyServerOverwrite_(curViewCache, new Path(childKey), newChild, writesCache, - serverCache, filterServerNode, accumulator); + curViewCache = this.applyServerOverwrite_( + curViewCache, + new Path(childKey), + newChild, + writesCache, + serverCache, + filterServerNode, + accumulator + ); } }); @@ -455,8 +702,14 @@ export class 
ViewProcessor { * @return {!ViewCache} * @private */ - private ackUserWrite_(viewCache: ViewCache, ackPath: Path, affectedTree: ImmutableTree, writesCache: WriteTreeRef, - completeCache: Node | null, accumulator: ChildChangeAccumulator): ViewCache { + private ackUserWrite_( + viewCache: ViewCache, + ackPath: Path, + affectedTree: ImmutableTree, + writesCache: WriteTreeRef, + completeCache: Node | null, + accumulator: ChildChangeAccumulator + ): ViewCache { if (writesCache.shadowingWrite(ackPath) != null) { return viewCache; } @@ -469,32 +722,59 @@ export class ViewProcessor { const serverCache = viewCache.getServerCache(); if (affectedTree.value != null) { // This is an overwrite. - if ((ackPath.isEmpty() && serverCache.isFullyInitialized()) || serverCache.isCompleteForPath(ackPath)) { - return this.applyServerOverwrite_(viewCache, ackPath, serverCache.getNode().getChild(ackPath), - writesCache, completeCache, filterServerNode, accumulator); + if ( + (ackPath.isEmpty() && serverCache.isFullyInitialized()) || + serverCache.isCompleteForPath(ackPath) + ) { + return this.applyServerOverwrite_( + viewCache, + ackPath, + serverCache.getNode().getChild(ackPath), + writesCache, + completeCache, + filterServerNode, + accumulator + ); } else if (ackPath.isEmpty()) { // This is a goofy edge case where we are acking data at this location but don't have full data. We // should just re-apply whatever we have in our cache as a merge. 
let changedChildren = ImmutableTree.Empty; - serverCache.getNode().forEachChild(KEY_INDEX, function (name, node) { + serverCache.getNode().forEachChild(KEY_INDEX, function(name, node) { changedChildren = changedChildren.set(new Path(name), node); }); - return this.applyServerMerge_(viewCache, ackPath, changedChildren, writesCache, completeCache, - filterServerNode, accumulator); + return this.applyServerMerge_( + viewCache, + ackPath, + changedChildren, + writesCache, + completeCache, + filterServerNode, + accumulator + ); } else { return viewCache; } } else { // This is a merge. let changedChildren = ImmutableTree.Empty; - affectedTree.foreach(function (mergePath, value) { + affectedTree.foreach(function(mergePath, value) { const serverCachePath = ackPath.child(mergePath); if (serverCache.isCompleteForPath(serverCachePath)) { - changedChildren = changedChildren.set(mergePath, serverCache.getNode().getChild(serverCachePath)); + changedChildren = changedChildren.set( + mergePath, + serverCache.getNode().getChild(serverCachePath) + ); } }); - return this.applyServerMerge_(viewCache, ackPath, changedChildren, writesCache, completeCache, - filterServerNode, accumulator); + return this.applyServerMerge_( + viewCache, + ackPath, + changedChildren, + writesCache, + completeCache, + filterServerNode, + accumulator + ); } } @@ -506,13 +786,25 @@ export class ViewProcessor { * @return {!ViewCache} * @private */ - private listenComplete_(viewCache: ViewCache, path: Path, writesCache: WriteTreeRef, - accumulator: ChildChangeAccumulator): ViewCache { + private listenComplete_( + viewCache: ViewCache, + path: Path, + writesCache: WriteTreeRef, + accumulator: ChildChangeAccumulator + ): ViewCache { const oldServerNode = viewCache.getServerCache(); - const newViewCache = viewCache.updateServerSnap(oldServerNode.getNode(), - oldServerNode.isFullyInitialized() || path.isEmpty(), oldServerNode.isFiltered()); - return this.generateEventCacheAfterServerEvent_(newViewCache, path, 
writesCache, - NO_COMPLETE_CHILD_SOURCE, accumulator); + const newViewCache = viewCache.updateServerSnap( + oldServerNode.getNode(), + oldServerNode.isFullyInitialized() || path.isEmpty(), + oldServerNode.isFiltered() + ); + return this.generateEventCacheAfterServerEvent_( + newViewCache, + path, + writesCache, + NO_COMPLETE_CHILD_SOURCE, + accumulator + ); } /** @@ -524,55 +816,105 @@ export class ViewProcessor { * @return {!ViewCache} * @private */ - private revertUserWrite_(viewCache: ViewCache, path: Path, writesCache: WriteTreeRef, completeServerCache: Node | null, - accumulator: ChildChangeAccumulator): ViewCache { + private revertUserWrite_( + viewCache: ViewCache, + path: Path, + writesCache: WriteTreeRef, + completeServerCache: Node | null, + accumulator: ChildChangeAccumulator + ): ViewCache { let complete; if (writesCache.shadowingWrite(path) != null) { return viewCache; } else { - const source = new WriteTreeCompleteChildSource(writesCache, viewCache, completeServerCache); + const source = new WriteTreeCompleteChildSource( + writesCache, + viewCache, + completeServerCache + ); const oldEventCache = viewCache.getEventCache().getNode(); let newEventCache; if (path.isEmpty() || path.getFront() === '.priority') { let newNode; if (viewCache.getServerCache().isFullyInitialized()) { - newNode = writesCache.calcCompleteEventCache(viewCache.getCompleteServerSnap()); + newNode = writesCache.calcCompleteEventCache( + viewCache.getCompleteServerSnap() + ); } else { const serverChildren = viewCache.getServerCache().getNode(); - assert(serverChildren instanceof ChildrenNode, - 'serverChildren would be complete if leaf node'); - newNode = writesCache.calcCompleteEventChildren(serverChildren as ChildrenNode); + assert( + serverChildren instanceof ChildrenNode, + 'serverChildren would be complete if leaf node' + ); + newNode = writesCache.calcCompleteEventChildren( + serverChildren as ChildrenNode + ); } newNode = newNode as Node; - newEventCache = 
this.filter_.updateFullNode(oldEventCache, newNode, accumulator); + newEventCache = this.filter_.updateFullNode( + oldEventCache, + newNode, + accumulator + ); } else { const childKey = path.getFront(); - let newChild = writesCache.calcCompleteChild(childKey, viewCache.getServerCache()); - if (newChild == null && viewCache.getServerCache().isCompleteForChild(childKey)) { + let newChild = writesCache.calcCompleteChild( + childKey, + viewCache.getServerCache() + ); + if ( + newChild == null && + viewCache.getServerCache().isCompleteForChild(childKey) + ) { newChild = oldEventCache.getImmediateChild(childKey); } if (newChild != null) { - newEventCache = this.filter_.updateChild(oldEventCache, childKey, newChild, path.popFront(), source, - accumulator); + newEventCache = this.filter_.updateChild( + oldEventCache, + childKey, + newChild, + path.popFront(), + source, + accumulator + ); } else if (viewCache.getEventCache().getNode().hasChild(childKey)) { // No complete child available, delete the existing one, if any - newEventCache = this.filter_.updateChild(oldEventCache, childKey, ChildrenNode.EMPTY_NODE, path.popFront(), - source, accumulator); + newEventCache = this.filter_.updateChild( + oldEventCache, + childKey, + ChildrenNode.EMPTY_NODE, + path.popFront(), + source, + accumulator + ); } else { newEventCache = oldEventCache; } - if (newEventCache.isEmpty() && viewCache.getServerCache().isFullyInitialized()) { + if ( + newEventCache.isEmpty() && + viewCache.getServerCache().isFullyInitialized() + ) { // We might have reverted all child writes. 
Maybe the old event was a leaf node - complete = writesCache.calcCompleteEventCache(viewCache.getCompleteServerSnap()); + complete = writesCache.calcCompleteEventCache( + viewCache.getCompleteServerSnap() + ); if (complete.isLeafNode()) { - newEventCache = this.filter_.updateFullNode(newEventCache, complete, accumulator); + newEventCache = this.filter_.updateFullNode( + newEventCache, + complete, + accumulator + ); } } } - complete = viewCache.getServerCache().isFullyInitialized() || + complete = + viewCache.getServerCache().isFullyInitialized() || writesCache.shadowingWrite(Path.Empty) != null; - return viewCache.updateEventSnap(newEventCache, complete, this.filter_.filtersNodes()); + return viewCache.updateEventSnap( + newEventCache, + complete, + this.filter_.filtersNodes() + ); } } } - diff --git a/src/database/core/view/filter/IndexedFilter.ts b/src/database/core/view/filter/IndexedFilter.ts index 96b49780364..4a95b6e8d15 100644 --- a/src/database/core/view/filter/IndexedFilter.ts +++ b/src/database/core/view/filter/IndexedFilter.ts @@ -14,10 +14,10 @@ * limitations under the License. 
*/ -import { assert } from "../../../../utils/assert"; -import { Change } from "../Change"; -import { ChildrenNode } from "../../snap/ChildrenNode"; -import { PRIORITY_INDEX } from "../../snap/indexes/PriorityIndex"; +import { assert } from '../../../../utils/assert'; +import { Change } from '../Change'; +import { ChildrenNode } from '../../snap/ChildrenNode'; +import { PRIORITY_INDEX } from '../../snap/indexes/PriorityIndex'; import { NodeFilter } from './NodeFilter'; import { Index } from '../../snap/indexes/Index'; import { Path } from '../../util/Path'; @@ -33,16 +33,25 @@ import { Node } from '../../snap/Node'; * @param {!Index} index */ export class IndexedFilter implements NodeFilter { - constructor(private readonly index_: Index) { - } + constructor(private readonly index_: Index) {} - updateChild(snap: Node, key: string, newChild: Node, affectedPath: Path, - source: CompleteChildSource, - optChangeAccumulator: ChildChangeAccumulator | null): Node { - assert(snap.isIndexed(this.index_), 'A node must be indexed if only a child is updated'); + updateChild( + snap: Node, + key: string, + newChild: Node, + affectedPath: Path, + source: CompleteChildSource, + optChangeAccumulator: ChildChangeAccumulator | null + ): Node { + assert( + snap.isIndexed(this.index_), + 'A node must be indexed if only a child is updated' + ); const oldChild = snap.getImmediateChild(key); // Check if anything actually changed. - if (oldChild.getChild(affectedPath).equals(newChild.getChild(affectedPath))) { + if ( + oldChild.getChild(affectedPath).equals(newChild.getChild(affectedPath)) + ) { // There's an edge case where a child can enter or leave the view because affectedPath was set to null. // In this case, affectedPath will appear null in both the old and new snapshots. So we need // to avoid treating these cases as "nothing changed." 
@@ -58,14 +67,23 @@ export class IndexedFilter implements NodeFilter { if (optChangeAccumulator != null) { if (newChild.isEmpty()) { if (snap.hasChild(key)) { - optChangeAccumulator.trackChildChange(Change.childRemovedChange(key, oldChild)); + optChangeAccumulator.trackChildChange( + Change.childRemovedChange(key, oldChild) + ); } else { - assert(snap.isLeafNode(), 'A child remove without an old child only makes sense on a leaf node'); + assert( + snap.isLeafNode(), + 'A child remove without an old child only makes sense on a leaf node' + ); } } else if (oldChild.isEmpty()) { - optChangeAccumulator.trackChildChange(Change.childAddedChange(key, newChild)); + optChangeAccumulator.trackChildChange( + Change.childAddedChange(key, newChild) + ); } else { - optChangeAccumulator.trackChildChange(Change.childChangedChange(key, newChild, oldChild)); + optChangeAccumulator.trackChildChange( + Change.childChangedChange(key, newChild, oldChild) + ); } } if (snap.isLeafNode() && newChild.isEmpty()) { @@ -74,18 +92,23 @@ export class IndexedFilter implements NodeFilter { // Make sure the node is indexed return snap.updateImmediateChild(key, newChild).withIndex(this.index_); } - }; + } /** * @inheritDoc */ - updateFullNode(oldSnap: Node, newSnap: Node, - optChangeAccumulator: ChildChangeAccumulator | null): Node { + updateFullNode( + oldSnap: Node, + newSnap: Node, + optChangeAccumulator: ChildChangeAccumulator | null + ): Node { if (optChangeAccumulator != null) { if (!oldSnap.isLeafNode()) { oldSnap.forEachChild(PRIORITY_INDEX, function(key, childNode) { if (!newSnap.hasChild(key)) { - optChangeAccumulator.trackChildChange(Change.childRemovedChange(key, childNode)); + optChangeAccumulator.trackChildChange( + Change.childRemovedChange(key, childNode) + ); } }); } @@ -94,16 +117,20 @@ export class IndexedFilter implements NodeFilter { if (oldSnap.hasChild(key)) { const oldChild = oldSnap.getImmediateChild(key); if (!oldChild.equals(childNode)) { - 
optChangeAccumulator.trackChildChange(Change.childChangedChange(key, childNode, oldChild)); + optChangeAccumulator.trackChildChange( + Change.childChangedChange(key, childNode, oldChild) + ); } } else { - optChangeAccumulator.trackChildChange(Change.childAddedChange(key, childNode)); + optChangeAccumulator.trackChildChange( + Change.childAddedChange(key, childNode) + ); } }); } } return newSnap.withIndex(this.index_); - }; + } /** * @inheritDoc @@ -114,26 +141,26 @@ export class IndexedFilter implements NodeFilter { } else { return oldSnap.updatePriority(newPriority); } - }; + } /** * @inheritDoc */ filtersNodes(): boolean { return false; - }; + } /** * @inheritDoc */ getIndexedFilter(): IndexedFilter { return this; - }; + } /** * @inheritDoc */ getIndex(): Index { return this.index_; - }; + } } diff --git a/src/database/core/view/filter/LimitedFilter.ts b/src/database/core/view/filter/LimitedFilter.ts index ce10db8a2b3..f0079a90a85 100644 --- a/src/database/core/view/filter/LimitedFilter.ts +++ b/src/database/core/view/filter/LimitedFilter.ts @@ -75,9 +75,14 @@ export class LimitedFilter implements NodeFilter { /** * @inheritDoc */ - updateChild(snap: Node, key: string, newChild: Node, affectedPath: Path, - source: CompleteChildSource, - optChangeAccumulator: ChildChangeAccumulator | null): Node { + updateChild( + snap: Node, + key: string, + newChild: Node, + affectedPath: Path, + source: CompleteChildSource, + optChangeAccumulator: ChildChangeAccumulator | null + ): Node { if (!this.rangedFilter_.matches(new NamedNode(key, newChild))) { newChild = ChildrenNode.EMPTY_NODE; } @@ -85,41 +90,69 @@ export class LimitedFilter implements NodeFilter { // No change return snap; } else if (snap.numChildren() < this.limit_) { - return this.rangedFilter_.getIndexedFilter().updateChild(snap, key, newChild, affectedPath, source, - optChangeAccumulator); + return this.rangedFilter_ + .getIndexedFilter() + .updateChild( + snap, + key, + newChild, + affectedPath, + source, + 
optChangeAccumulator + ); } else { - return this.fullLimitUpdateChild_(snap, key, newChild, source, optChangeAccumulator); + return this.fullLimitUpdateChild_( + snap, + key, + newChild, + source, + optChangeAccumulator + ); } } /** * @inheritDoc */ - updateFullNode(oldSnap: Node, newSnap: Node, - optChangeAccumulator: ChildChangeAccumulator | null): Node { + updateFullNode( + oldSnap: Node, + newSnap: Node, + optChangeAccumulator: ChildChangeAccumulator | null + ): Node { let filtered; if (newSnap.isLeafNode() || newSnap.isEmpty()) { // Make sure we have a children node with the correct index, not a leaf node; filtered = ChildrenNode.EMPTY_NODE.withIndex(this.index_); } else { - if (this.limit_ * 2 < newSnap.numChildren() && newSnap.isIndexed(this.index_)) { + if ( + this.limit_ * 2 < newSnap.numChildren() && + newSnap.isIndexed(this.index_) + ) { // Easier to build up a snapshot, since what we're given has more than twice the elements we want filtered = ChildrenNode.EMPTY_NODE.withIndex(this.index_); // anchor to the startPost, endPost, or last element as appropriate let iterator; if (this.reverse_) { - iterator = (newSnap as ChildrenNode).getReverseIteratorFrom(this.rangedFilter_.getEndPost(), this.index_); + iterator = (newSnap as ChildrenNode).getReverseIteratorFrom( + this.rangedFilter_.getEndPost(), + this.index_ + ); } else { - iterator = (newSnap as ChildrenNode).getIteratorFrom(this.rangedFilter_.getStartPost(), this.index_); + iterator = (newSnap as ChildrenNode).getIteratorFrom( + this.rangedFilter_.getStartPost(), + this.index_ + ); } let count = 0; while (iterator.hasNext() && count < this.limit_) { const next = iterator.getNext(); let inRange; if (this.reverse_) { - inRange = this.index_.compare(this.rangedFilter_.getStartPost(), next) <= 0; + inRange = + this.index_.compare(this.rangedFilter_.getStartPost(), next) <= 0; } else { - inRange = this.index_.compare(next, this.rangedFilter_.getEndPost()) <= 0; + inRange = + this.index_.compare(next, 
this.rangedFilter_.getEndPost()) <= 0; } if (inRange) { filtered = filtered.updateImmediateChild(next.name, next.node); @@ -133,7 +166,9 @@ export class LimitedFilter implements NodeFilter { // The snap contains less than twice the limit. Faster to delete from the snap than build up a new one filtered = newSnap.withIndex(this.index_); // Don't support priorities on queries - filtered = filtered.updatePriority(ChildrenNode.EMPTY_NODE) as ChildrenNode; + filtered = filtered.updatePriority( + ChildrenNode.EMPTY_NODE + ) as ChildrenNode; let startPost; let endPost; let cmp; @@ -159,16 +194,22 @@ export class LimitedFilter implements NodeFilter { // start adding foundStartPost = true; } - let inRange = foundStartPost && count < this.limit_ && cmp(next, endPost) <= 0; + let inRange = + foundStartPost && count < this.limit_ && cmp(next, endPost) <= 0; if (inRange) { count++; } else { - filtered = filtered.updateImmediateChild(next.name, ChildrenNode.EMPTY_NODE); + filtered = filtered.updateImmediateChild( + next.name, + ChildrenNode.EMPTY_NODE + ); } } } } - return this.rangedFilter_.getIndexedFilter().updateFullNode(oldSnap, filtered, optChangeAccumulator); + return this.rangedFilter_ + .getIndexedFilter() + .updateFullNode(oldSnap, filtered, optChangeAccumulator); } /** @@ -209,8 +250,13 @@ export class LimitedFilter implements NodeFilter { * @return {!Node} * @private */ - private fullLimitUpdateChild_(snap: Node, childKey: string, childSnap: Node, source: CompleteChildSource, - changeAccumulator: ChildChangeAccumulator | null): Node { + private fullLimitUpdateChild_( + snap: Node, + childKey: string, + childSnap: Node, + source: CompleteChildSource, + changeAccumulator: ChildChangeAccumulator | null + ): Node { // TODO: rename all cache stuff etc to general snap terminology let cmp; if (this.reverse_) { @@ -222,35 +268,63 @@ export class LimitedFilter implements NodeFilter { const oldEventCache = snap as ChildrenNode; assert(oldEventCache.numChildren() == this.limit_, 
''); const newChildNamedNode = new NamedNode(childKey, childSnap); - const windowBoundary = this.reverse_ ? oldEventCache.getFirstChild(this.index_) : oldEventCache.getLastChild(this.index_) as NamedNode; + const windowBoundary = this.reverse_ + ? oldEventCache.getFirstChild(this.index_) + : oldEventCache.getLastChild(this.index_) as NamedNode; const inRange = this.rangedFilter_.matches(newChildNamedNode); if (oldEventCache.hasChild(childKey)) { const oldChildSnap = oldEventCache.getImmediateChild(childKey); - let nextChild = source.getChildAfterChild(this.index_, windowBoundary, this.reverse_); - while (nextChild != null && (nextChild.name == childKey || oldEventCache.hasChild(nextChild.name))) { + let nextChild = source.getChildAfterChild( + this.index_, + windowBoundary, + this.reverse_ + ); + while ( + nextChild != null && + (nextChild.name == childKey || oldEventCache.hasChild(nextChild.name)) + ) { // There is a weird edge case where a node is updated as part of a merge in the write tree, but hasn't // been applied to the limited filter yet. Ignore this next child which will be updated later in // the limited filter... - nextChild = source.getChildAfterChild(this.index_, nextChild, this.reverse_); + nextChild = source.getChildAfterChild( + this.index_, + nextChild, + this.reverse_ + ); } - const compareNext = nextChild == null ? 1 : cmp(nextChild, newChildNamedNode); - const remainsInWindow = inRange && !childSnap.isEmpty() && compareNext >= 0; + const compareNext = + nextChild == null ? 
1 : cmp(nextChild, newChildNamedNode); + const remainsInWindow = + inRange && !childSnap.isEmpty() && compareNext >= 0; if (remainsInWindow) { if (changeAccumulator != null) { - changeAccumulator.trackChildChange(Change.childChangedChange(childKey, childSnap, oldChildSnap)); + changeAccumulator.trackChildChange( + Change.childChangedChange(childKey, childSnap, oldChildSnap) + ); } return oldEventCache.updateImmediateChild(childKey, childSnap); } else { if (changeAccumulator != null) { - changeAccumulator.trackChildChange(Change.childRemovedChange(childKey, oldChildSnap)); + changeAccumulator.trackChildChange( + Change.childRemovedChange(childKey, oldChildSnap) + ); } - const newEventCache = oldEventCache.updateImmediateChild(childKey, ChildrenNode.EMPTY_NODE); - const nextChildInRange = nextChild != null && this.rangedFilter_.matches(nextChild); + const newEventCache = oldEventCache.updateImmediateChild( + childKey, + ChildrenNode.EMPTY_NODE + ); + const nextChildInRange = + nextChild != null && this.rangedFilter_.matches(nextChild); if (nextChildInRange) { if (changeAccumulator != null) { - changeAccumulator.trackChildChange(Change.childAddedChange(nextChild.name, nextChild.node)); + changeAccumulator.trackChildChange( + Change.childAddedChange(nextChild.name, nextChild.node) + ); } - return newEventCache.updateImmediateChild(nextChild.name, nextChild.node); + return newEventCache.updateImmediateChild( + nextChild.name, + nextChild.node + ); } else { return newEventCache; } @@ -261,11 +335,16 @@ export class LimitedFilter implements NodeFilter { } else if (inRange) { if (cmp(windowBoundary, newChildNamedNode) >= 0) { if (changeAccumulator != null) { - changeAccumulator.trackChildChange(Change.childRemovedChange(windowBoundary.name, windowBoundary.node)); - changeAccumulator.trackChildChange(Change.childAddedChange(childKey, childSnap)); + changeAccumulator.trackChildChange( + Change.childRemovedChange(windowBoundary.name, windowBoundary.node) + ); + 
changeAccumulator.trackChildChange( + Change.childAddedChange(childKey, childSnap) + ); } - return oldEventCache.updateImmediateChild(childKey, childSnap).updateImmediateChild(windowBoundary.name, - ChildrenNode.EMPTY_NODE); + return oldEventCache + .updateImmediateChild(childKey, childSnap) + .updateImmediateChild(windowBoundary.name, ChildrenNode.EMPTY_NODE); } else { return snap; } diff --git a/src/database/core/view/filter/NodeFilter.ts b/src/database/core/view/filter/NodeFilter.ts index 82b1684e9f5..cf289d2a92b 100644 --- a/src/database/core/view/filter/NodeFilter.ts +++ b/src/database/core/view/filter/NodeFilter.ts @@ -27,7 +27,6 @@ import { Index } from '../../snap/indexes/Index'; * @interface */ export interface NodeFilter { - /** * Update a single complete child in the snap. If the child equals the old child in the snap, this is a no-op. * The method expects an indexed snap. @@ -40,9 +39,14 @@ export interface NodeFilter { * @param {?ChildChangeAccumulator} optChangeAccumulator * @return {!Node} */ - updateChild(snap: Node, key: string, newChild: Node, affectedPath: Path, - source: CompleteChildSource, - optChangeAccumulator: ChildChangeAccumulator | null): Node; + updateChild( + snap: Node, + key: string, + newChild: Node, + affectedPath: Path, + source: CompleteChildSource, + optChangeAccumulator: ChildChangeAccumulator | null + ): Node; /** * Update a node in full and output any resulting change from this complete update. 
@@ -52,8 +56,11 @@ export interface NodeFilter { * @param {?ChildChangeAccumulator} optChangeAccumulator * @return {!Node} */ - updateFullNode(oldSnap: Node, newSnap: Node, - optChangeAccumulator: ChildChangeAccumulator | null): Node; + updateFullNode( + oldSnap: Node, + newSnap: Node, + optChangeAccumulator: ChildChangeAccumulator | null + ): Node; /** * Update the priority of the root node diff --git a/src/database/core/view/filter/RangedFilter.ts b/src/database/core/view/filter/RangedFilter.ts index 037dae26053..ebbbebdf308 100644 --- a/src/database/core/view/filter/RangedFilter.ts +++ b/src/database/core/view/filter/RangedFilter.ts @@ -89,26 +89,44 @@ export class RangedFilter implements NodeFilter { * @return {boolean} */ matches(node: NamedNode): boolean { - return (this.index_.compare(this.getStartPost(), node) <= 0 && this.index_.compare(node, this.getEndPost()) <= 0); + return ( + this.index_.compare(this.getStartPost(), node) <= 0 && + this.index_.compare(node, this.getEndPost()) <= 0 + ); } /** * @inheritDoc */ - updateChild(snap: Node, key: string, newChild: Node, affectedPath: Path, - source: CompleteChildSource, - optChangeAccumulator: ChildChangeAccumulator | null): Node { + updateChild( + snap: Node, + key: string, + newChild: Node, + affectedPath: Path, + source: CompleteChildSource, + optChangeAccumulator: ChildChangeAccumulator | null + ): Node { if (!this.matches(new NamedNode(key, newChild))) { newChild = ChildrenNode.EMPTY_NODE; } - return this.indexedFilter_.updateChild(snap, key, newChild, affectedPath, source, optChangeAccumulator); + return this.indexedFilter_.updateChild( + snap, + key, + newChild, + affectedPath, + source, + optChangeAccumulator + ); } /** * @inheritDoc */ - updateFullNode(oldSnap: Node, newSnap: Node, - optChangeAccumulator: ChildChangeAccumulator | null): Node { + updateFullNode( + oldSnap: Node, + newSnap: Node, + optChangeAccumulator: ChildChangeAccumulator | null + ): Node { if (newSnap.isLeafNode()) { // Make sure 
we have a children node with the correct index, not a leaf node; newSnap = ChildrenNode.EMPTY_NODE; @@ -117,12 +135,16 @@ export class RangedFilter implements NodeFilter { // Don't support priorities on queries filtered = filtered.updatePriority(ChildrenNode.EMPTY_NODE); const self = this; - newSnap.forEachChild(PRIORITY_INDEX, function (key, childNode) { + newSnap.forEachChild(PRIORITY_INDEX, function(key, childNode) { if (!self.matches(new NamedNode(key, childNode))) { filtered = filtered.updateImmediateChild(key, ChildrenNode.EMPTY_NODE); } }); - return this.indexedFilter_.updateFullNode(oldSnap, filtered, optChangeAccumulator); + return this.indexedFilter_.updateFullNode( + oldSnap, + filtered, + optChangeAccumulator + ); } /** diff --git a/src/database/realtime/BrowserPollConnection.ts b/src/database/realtime/BrowserPollConnection.ts index f083664dba6..e3cc7bfa162 100644 --- a/src/database/realtime/BrowserPollConnection.ts +++ b/src/database/realtime/BrowserPollConnection.ts @@ -112,11 +112,16 @@ export class BrowserPollConnection implements Transport { * @param {string=} lastSessionId Optional lastSessionId if the PersistentConnection has already created a * connection previously */ - constructor(public connId: string, public repoInfo: RepoInfo, - public transportSessionId?: string, public lastSessionId?: string) { + constructor( + public connId: string, + public repoInfo: RepoInfo, + public transportSessionId?: string, + public lastSessionId?: string + ) { this.log_ = logWrapper(connId); this.stats_ = StatsManager.getCollection(repoInfo); - this.urlFn = (params: { [k: string]: string }) => repoInfo.connectionURL(LONG_POLLING, params); + this.urlFn = (params: { [k: string]: string }) => + repoInfo.connectionURL(LONG_POLLING, params); } /** @@ -139,55 +144,64 @@ export class BrowserPollConnection implements Transport { // Ensure we delay the creation of the iframe until the DOM is loaded. 
executeWhenDOMReady(() => { - if (this.isClosed_) - return; + if (this.isClosed_) return; //Set up a callback that gets triggered once a connection is set up. - this.scriptTagHolder = new FirebaseIFrameScriptHolder((...args) => { - const [command, arg1, arg2, arg3, arg4] = args; - this.incrementIncomingBytes_(args); - if (!this.scriptTagHolder) - return; // we closed the connection. - - if (this.connectTimeoutTimer_) { - clearTimeout(this.connectTimeoutTimer_); - this.connectTimeoutTimer_ = null; - } - this.everConnected_ = true; - if (command == FIREBASE_LONGPOLL_START_PARAM) { - this.id = arg1; - this.password = arg2; - } else if (command === FIREBASE_LONGPOLL_CLOSE_COMMAND) { - // Don't clear the host cache. We got a response from the server, so we know it's reachable - if (arg1) { - // We aren't expecting any more data (other than what the server's already in the process of sending us - // through our already open polls), so don't send any more. - this.scriptTagHolder.sendNewPolls = false; - - // arg1 in this case is the last response number sent by the server. We should try to receive - // all of the responses up to this one before closing - this.myPacketOrderer.closeAfter(arg1, () => { this.onClosed_(); }); + this.scriptTagHolder = new FirebaseIFrameScriptHolder( + (...args) => { + const [command, arg1, arg2, arg3, arg4] = args; + this.incrementIncomingBytes_(args); + if (!this.scriptTagHolder) return; // we closed the connection. + + if (this.connectTimeoutTimer_) { + clearTimeout(this.connectTimeoutTimer_); + this.connectTimeoutTimer_ = null; + } + this.everConnected_ = true; + if (command == FIREBASE_LONGPOLL_START_PARAM) { + this.id = arg1; + this.password = arg2; + } else if (command === FIREBASE_LONGPOLL_CLOSE_COMMAND) { + // Don't clear the host cache. 
We got a response from the server, so we know it's reachable + if (arg1) { + // We aren't expecting any more data (other than what the server's already in the process of sending us + // through our already open polls), so don't send any more. + this.scriptTagHolder.sendNewPolls = false; + + // arg1 in this case is the last response number sent by the server. We should try to receive + // all of the responses up to this one before closing + this.myPacketOrderer.closeAfter(arg1, () => { + this.onClosed_(); + }); + } else { + this.onClosed_(); + } } else { - this.onClosed_(); + throw new Error('Unrecognized command received: ' + command); } - } else { - throw new Error('Unrecognized command received: ' + command); - } - }, (...args) => { - const [pN, data] = args; - this.incrementIncomingBytes_(args); - this.myPacketOrderer.handleResponse(pN, data); - }, () => { - this.onClosed_(); - }, this.urlFn); + }, + (...args) => { + const [pN, data] = args; + this.incrementIncomingBytes_(args); + this.myPacketOrderer.handleResponse(pN, data); + }, + () => { + this.onClosed_(); + }, + this.urlFn + ); //Send the initial request to connect. The serial number is simply to keep the browser from pulling previous results //from cache. 
const urlParams: { [k: string]: string | number } = {}; urlParams[FIREBASE_LONGPOLL_START_PARAM] = 't'; - urlParams[FIREBASE_LONGPOLL_SERIAL_PARAM] = Math.floor(Math.random() * 100000000); + urlParams[FIREBASE_LONGPOLL_SERIAL_PARAM] = Math.floor( + Math.random() * 100000000 + ); if (this.scriptTagHolder.uniqueCallbackIdentifier) - urlParams[FIREBASE_LONGPOLL_CALLBACK_ID_PARAM] = this.scriptTagHolder.uniqueCallbackIdentifier; + urlParams[ + FIREBASE_LONGPOLL_CALLBACK_ID_PARAM + ] = this.scriptTagHolder.uniqueCallbackIdentifier; urlParams[VERSION_PARAM] = PROTOCOL_VERSION; if (this.transportSessionId) { urlParams[TRANSPORT_SESSION_PARAM] = this.transportSessionId; @@ -195,17 +209,21 @@ export class BrowserPollConnection implements Transport { if (this.lastSessionId) { urlParams[LAST_SESSION_PARAM] = this.lastSessionId; } - if (!isNodeSdk() && + if ( + !isNodeSdk() && typeof location !== 'undefined' && location.href && - location.href.indexOf(FORGE_DOMAIN) !== -1) { + location.href.indexOf(FORGE_DOMAIN) !== -1 + ) { urlParams[REFERER_PARAM] = FORGE_REF; } const connectURL = this.urlFn(urlParams); this.log_('Connecting via long-poll to ' + connectURL); - this.scriptTagHolder.addTag(connectURL, () => { /* do nothing */ }); + this.scriptTagHolder.addTag(connectURL, () => { + /* do nothing */ + }); }); - }; + } /** * Call this when a handshake has completed successfully and we want to consider the connection established @@ -213,7 +231,7 @@ export class BrowserPollConnection implements Transport { start() { this.scriptTagHolder.startLongPoll(this.id, this.password); this.addDisconnectPingFrame(this.id, this.password); - }; + } private static forceAllow_: boolean; @@ -222,7 +240,7 @@ export class BrowserPollConnection implements Transport { */ static forceAllow() { BrowserPollConnection.forceAllow_ = true; - }; + } private static forceDisallow_: boolean; @@ -231,25 +249,27 @@ export class BrowserPollConnection implements Transport { */ static forceDisallow() { 
BrowserPollConnection.forceDisallow_ = true; - }; + } // Static method, use string literal so it can be accessed in a generic way static isAvailable() { // NOTE: In React-Native there's normally no 'document', but if you debug a React-Native app in // the Chrome debugger, 'document' is defined, but document.createElement is null (2015/06/08). - return BrowserPollConnection.forceAllow_ || ( - !BrowserPollConnection.forceDisallow_ && - typeof document !== 'undefined' && document.createElement != null && - !isChromeExtensionContentScript() && - !isWindowsStoreApp() && - !isNodeSdk() + return ( + BrowserPollConnection.forceAllow_ || + (!BrowserPollConnection.forceDisallow_ && + typeof document !== 'undefined' && + document.createElement != null && + !isChromeExtensionContentScript() && + !isWindowsStoreApp() && + !isNodeSdk()) ); - }; + } /** * No-op for polling */ - markConnectionHealthy() { }; + markConnectionHealthy() {} /** * Stops polling and cleans up the iframe @@ -273,7 +293,7 @@ export class BrowserPollConnection implements Transport { clearTimeout(this.connectTimeoutTimer_); this.connectTimeoutTimer_ = null; } - }; + } /** * Triggered when this transport is closed @@ -289,7 +309,7 @@ export class BrowserPollConnection implements Transport { this.onDisconnect_ = null; } } - }; + } /** * External-facing close handler. RealTime has requested we shut down. Kill our connection and tell the server @@ -300,7 +320,7 @@ export class BrowserPollConnection implements Transport { this.log_('Longpoll is being closed.'); this.shutdown_(); } - }; + } /** * Send the JSON object down to the server. It will need to be stringified, base64 encoded, and then @@ -322,10 +342,14 @@ export class BrowserPollConnection implements Transport { //Enqueue each segment for transmission. We assign each chunk a sequential ID and a total number //of segments so that we can reassemble the packet on the server. 
for (let i = 0; i < dataSegs.length; i++) { - this.scriptTagHolder.enqueueSegment(this.curSegmentNum, dataSegs.length, dataSegs[i]); + this.scriptTagHolder.enqueueSegment( + this.curSegmentNum, + dataSegs.length, + dataSegs[i] + ); this.curSegmentNum++; } - }; + } /** * This is how we notify the server that we're leaving. @@ -345,7 +369,7 @@ export class BrowserPollConnection implements Transport { this.myDisconnFrame.style.display = 'none'; document.body.appendChild(this.myDisconnFrame); - }; + } /** * Used to track the bytes received by this client @@ -357,11 +381,11 @@ export class BrowserPollConnection implements Transport { const bytesReceived = stringify(args).length; this.bytesReceived += bytesReceived; this.stats_.incrementCounter('bytes_received', bytesReceived); - }; + } } export interface IFrameElement extends HTMLIFrameElement { - doc: Document; + doc: Document; } /********************************************************************************************* @@ -377,7 +401,7 @@ export class FirebaseIFrameScriptHolder { outstandingRequests = new CountedSet(); //A queue of the pending segments waiting for transmission to the server. - pendingSegs: { seg: number, ts: number, d: any }[] = []; + pendingSegs: { seg: number; ts: number; d: any }[] = []; //A serial number. We use this for two things: // 1) A way to ensure the browser doesn't cache responses to polls @@ -404,18 +428,24 @@ export class FirebaseIFrameScriptHolder { * @param onDisconnect - The callback to be triggered when this tag holder is closed * @param urlFn - A function that provides the URL of the endpoint to send data to. 
*/ - constructor(commandCB: (command: string, ...args: any[]) => void, - onMessageCB: (...args: any[]) => void, - public onDisconnect: () => void, - public urlFn: (a: object) => string) { + constructor( + commandCB: (command: string, ...args: any[]) => void, + onMessageCB: (...args: any[]) => void, + public onDisconnect: () => void, + public urlFn: (a: object) => string + ) { if (!isNodeSdk()) { //Each script holder registers a couple of uniquely named callbacks with the window. These are called from the //iframes where we put the long-polling script tags. We have two callbacks: // 1) Command Callback - Triggered for control issues, like starting a connection. // 2) Message Callback - Triggered when new data arrives. this.uniqueCallbackIdentifier = LUIDGenerator(); - (window as any)[FIREBASE_LONGPOLL_COMMAND_CB_NAME + this.uniqueCallbackIdentifier] = commandCB; - (window as any)[FIREBASE_LONGPOLL_DATA_CB_NAME + this.uniqueCallbackIdentifier] = onMessageCB; + (window as any)[ + FIREBASE_LONGPOLL_COMMAND_CB_NAME + this.uniqueCallbackIdentifier + ] = commandCB; + (window as any)[ + FIREBASE_LONGPOLL_DATA_CB_NAME + this.uniqueCallbackIdentifier + ] = onMessageCB; //Create an iframe for us to add script tags to. this.myIFrame = FirebaseIFrameScriptHolder.createIFrame_(); @@ -424,7 +454,10 @@ export class FirebaseIFrameScriptHolder { let script = ''; // if we set a javascript url, it's IE and we need to set the document domain. The javascript url is sufficient // for ie9, but ie8 needs to do it again in the document itself. 
- if (this.myIFrame.src && this.myIFrame.src.substr(0, 'javascript:'.length) === 'javascript:') { + if ( + this.myIFrame.src && + this.myIFrame.src.substr(0, 'javascript:'.length) === 'javascript:' + ) { const currentDomain = document.domain; script = ''; } @@ -470,8 +503,10 @@ export class FirebaseIFrameScriptHolder { } } catch (e) { const domain = document.domain; - iframe.src = 'javascript:void((function(){document.open();document.domain=\'' + domain + - '\';document.close();})())'; + iframe.src = + "javascript:void((function(){document.open();document.domain='" + + domain + + "';document.close();})())"; } } else { // LongPollConnection attempts to delay initialization until the document is ready, so hopefully this @@ -481,11 +516,11 @@ export class FirebaseIFrameScriptHolder { // Get the document of the iframe in a browser-specific way. if (iframe.contentDocument) { - (iframe as any).doc = iframe.contentDocument; // Firefox, Opera, Safari + (iframe as any).doc = iframe.contentDocument; // Firefox, Opera, Safari } else if (iframe.contentWindow) { - (iframe as any).doc = iframe.contentWindow.document; // Internet Explorer + (iframe as any).doc = iframe.contentWindow.document; // Internet Explorer } else if ((iframe as any).document) { - (iframe as any).doc = (iframe as any).document; //others? + (iframe as any).doc = (iframe as any).document; //others? } return iframe; @@ -540,7 +575,7 @@ export class FirebaseIFrameScriptHolder { //send the initial request. If there are requests queued, make sure that we transmit as many as we are currently able to. while (this.newRequest_()) {} - }; + } /** * This is called any time someone might want a script tag to be added. It adds a script tag when there aren't @@ -553,7 +588,11 @@ export class FirebaseIFrameScriptHolder { // We keep one outstanding request open all the time to receive data, but if we need to send data // (pendingSegs.length > 0) then we create a new request to send the data. 
The server will automatically // close the old request. - if (this.alive && this.sendNewPolls && this.outstandingRequests.count() < (this.pendingSegs.length > 0 ? 2 : 1)) { + if ( + this.alive && + this.sendNewPolls && + this.outstandingRequests.count() < (this.pendingSegs.length > 0 ? 2 : 1) + ) { //construct our url this.currentSerial++; const urlParams: { [k: string]: string | number } = {}; @@ -568,11 +607,29 @@ export class FirebaseIFrameScriptHolder { while (this.pendingSegs.length > 0) { //first, lets see if the next segment will fit. const nextSeg = this.pendingSegs[0]; - if (nextSeg.d.length + SEG_HEADER_SIZE + curDataString.length <= MAX_URL_DATA_SIZE) { + if ( + nextSeg.d.length + SEG_HEADER_SIZE + curDataString.length <= + MAX_URL_DATA_SIZE + ) { //great, the segment will fit. Lets append it. const theSeg = this.pendingSegs.shift(); - curDataString = curDataString + '&' + FIREBASE_LONGPOLL_SEGMENT_NUM_PARAM + i + '=' + theSeg.seg + - '&' + FIREBASE_LONGPOLL_SEGMENTS_IN_PACKET + i + '=' + theSeg.ts + '&' + FIREBASE_LONGPOLL_DATA_PARAM + i + '=' + theSeg.d; + curDataString = + curDataString + + '&' + + FIREBASE_LONGPOLL_SEGMENT_NUM_PARAM + + i + + '=' + + theSeg.seg + + '&' + + FIREBASE_LONGPOLL_SEGMENTS_IN_PACKET + + i + + '=' + + theSeg.ts + + '&' + + FIREBASE_LONGPOLL_DATA_PARAM + + i + + '=' + + theSeg.d; i++; } else { break; @@ -586,7 +643,7 @@ export class FirebaseIFrameScriptHolder { } else { return false; } - }; + } /** * Queue a packet for transmission to the server. @@ -596,14 +653,14 @@ export class FirebaseIFrameScriptHolder { */ enqueueSegment(segnum: number, totalsegs: number, data: any) { //add this to the queue of segments to send. - this.pendingSegs.push({seg: segnum, ts: totalsegs, d: data}); + this.pendingSegs.push({ seg: segnum, ts: totalsegs, d: data }); //send the data immediately if there isn't already data being transmitted, unless //startLongPoll hasn't been called yet. 
if (this.alive) { this.newRequest_(); } - }; + } /** * Add a script tag for a regular long-poll request. @@ -622,7 +679,10 @@ export class FirebaseIFrameScriptHolder { // If this request doesn't return on its own accord (by the server sending us some data), we'll // create a new one after the KEEPALIVE interval to make sure we always keep a fresh request open. - const keepaliveTimeout = setTimeout(doNewRequest, Math.floor(KEEPALIVE_REQUEST_INTERVAL)); + const keepaliveTimeout = setTimeout( + doNewRequest, + Math.floor(KEEPALIVE_REQUEST_INTERVAL) + ); const readyStateCB = () => { // Request completed. Cancel the keepalive. @@ -633,7 +693,7 @@ export class FirebaseIFrameScriptHolder { }; this.addTag(url, readyStateCB); - }; + } /** * Add an arbitrary script tag to the iframe. @@ -652,7 +712,7 @@ export class FirebaseIFrameScriptHolder { newScript.type = 'text/javascript'; newScript.async = true; newScript.src = url; - newScript.onload = (newScript as any).onreadystatechange = function () { + newScript.onload = (newScript as any).onreadystatechange = function() { const rstate = (newScript as any).readyState; if (!rstate || rstate === 'loaded' || rstate === 'complete') { newScript.onload = (newScript as any).onreadystatechange = null; diff --git a/src/database/realtime/Connection.ts b/src/database/realtime/Connection.ts index c27de144555..a06e5662c57 100644 --- a/src/database/realtime/Connection.ts +++ b/src/database/realtime/Connection.ts @@ -19,7 +19,7 @@ import { logWrapper, requireKey, setTimeoutNonBlocking, - warn, + warn } from '../core/util/util'; import { PersistentStorage } from '../core/storage/storage'; import { PROTOCOL_VERSION } from './Constants'; @@ -43,7 +43,7 @@ const BYTES_RECEIVED_HEALTHY_OVERRIDE = 100 * 1024; const enum RealtimeState { CONNECTING, CONNECTED, - DISCONNECTED, + DISCONNECTED } const MESSAGE_TYPE = 't'; @@ -58,7 +58,6 @@ const PING = 'p'; const SERVER_HELLO = 'h'; - /** * Creates a new real-time connection to the server using whichever 
method works * best in the current browser. @@ -91,13 +90,15 @@ export class Connection { * @param {function(string)} onKill_ - the callback to be triggered when this connection has permanently shut down. * @param {string=} lastSessionId - last session id in persistent connection. is used to clean up old session in real-time server */ - constructor(public id: string, - private repoInfo_: RepoInfo, - private onMessage_: (a: Object) => void, - private onReady_: (a: number, b: string) => void, - private onDisconnect_: () => void, - private onKill_: (a: string) => void, - public lastSessionId?: string) { + constructor( + public id: string, + private repoInfo_: RepoInfo, + private onMessage_: (a: Object) => void, + private onReady_: (a: number, b: string) => void, + private onDisconnect_: () => void, + private onKill_: (a: string) => void, + public lastSessionId?: string + ) { this.log_ = logWrapper('c:' + this.id + ':'); this.transportManager_ = new TransportManager(repoInfo_); this.log_('Connection created'); @@ -110,7 +111,12 @@ export class Connection { */ private start_() { const conn = this.transportManager_.initialTransport(); - this.conn_ = new conn(this.nextTransportId_(), this.repoInfo_, undefined, this.lastSessionId); + this.conn_ = new conn( + this.nextTransportId_(), + this.repoInfo_, + undefined, + this.lastSessionId + ); // For certain transports (WebSockets), we need to send and receive several messages back and forth before we // can consider the transport healthy. 
@@ -134,20 +140,31 @@ export class Connection { this.conn_ && this.conn_.open(onMessageReceived, onConnectionLost); }, Math.floor(0)); - const healthyTimeout_ms = conn['healthyTimeout'] || 0; if (healthyTimeout_ms > 0) { this.healthyTimeout_ = setTimeoutNonBlocking(() => { this.healthyTimeout_ = null; if (!this.isHealthy_) { - if (this.conn_ && this.conn_.bytesReceived > BYTES_RECEIVED_HEALTHY_OVERRIDE) { - this.log_('Connection exceeded healthy timeout but has received ' + this.conn_.bytesReceived + - ' bytes. Marking connection healthy.'); + if ( + this.conn_ && + this.conn_.bytesReceived > BYTES_RECEIVED_HEALTHY_OVERRIDE + ) { + this.log_( + 'Connection exceeded healthy timeout but has received ' + + this.conn_.bytesReceived + + ' bytes. Marking connection healthy.' + ); this.isHealthy_ = true; this.conn_.markConnectionHealthy(); - } else if (this.conn_ && this.conn_.bytesSent > BYTES_SENT_HEALTHY_OVERRIDE) { - this.log_('Connection exceeded healthy timeout but has sent ' + this.conn_.bytesSent + - ' bytes. Leaving connection alive.'); + } else if ( + this.conn_ && + this.conn_.bytesSent > BYTES_SENT_HEALTHY_OVERRIDE + ) { + this.log_( + 'Connection exceeded healthy timeout but has sent ' + + this.conn_.bytesSent + + ' bytes. Leaving connection alive.' + ); // NOTE: We don't want to mark it healthy, since we have no guarantee that the bytes have made it to // the server. 
} else { @@ -165,7 +182,7 @@ export class Connection { */ private nextTransportId_(): string { return 'c:' + this.id + ':' + this.connectionCount++; - }; + } private disconnReceiver_(conn) { return everConnected => { @@ -177,7 +194,7 @@ export class Connection { } else { this.log_('closing an old connection'); } - } + }; } private connReceiver_(conn: Transport) { @@ -200,13 +217,15 @@ export class Connection { */ sendRequest(dataMsg: object) { // wrap in a data message envelope and send it on - const msg = {'t': 'd', 'd': dataMsg}; + const msg = { t: 'd', d: dataMsg }; this.sendData_(msg); } tryCleanupConnection() { if (this.tx_ === this.secondaryConn_ && this.rx_ === this.secondaryConn_) { - this.log_('cleaning up and promoting a connection: ' + this.secondaryConn_.connId); + this.log_( + 'cleaning up and promoting a connection: ' + this.secondaryConn_.connId + ); this.conn_ = this.secondaryConn_; this.secondaryConn_ = null; // the server will shutdown the old connection @@ -223,7 +242,10 @@ export class Connection { this.log_('Got a reset on secondary, closing it'); this.secondaryConn_.close(); // If we were already using this connection for something, than we need to fully close - if (this.tx_ === this.secondaryConn_ || this.rx_ === this.secondaryConn_) { + if ( + this.tx_ === this.secondaryConn_ || + this.rx_ === this.secondaryConn_ + ) { this.close(); } } else if (cmd === CONTROL_PONG) { @@ -256,7 +278,7 @@ export class Connection { } else { // Send a ping to make sure the connection is healthy. 
this.log_('sending ping on secondary.'); - this.secondaryConn_.send({'t': 'c', 'd': {'t': PING, 'd': {}}}); + this.secondaryConn_.send({ t: 'c', d: { t: PING, d: {} } }); } } @@ -265,12 +287,12 @@ export class Connection { this.secondaryConn_.start(); // send ack this.log_('sending client ack on secondary'); - this.secondaryConn_.send({'t': 'c', 'd': {'t': SWITCH_ACK, 'd': {}}}); + this.secondaryConn_.send({ t: 'c', d: { t: SWITCH_ACK, d: {} } }); // send end packet on primary transport, switch to sending on this one // can receive on this one, buffer responses until end received on primary transport this.log_('Ending transmission on primary'); - this.conn_.send({'t': 'c', 'd': {'t': END_TRANSMISSION, 'd': {}}}); + this.conn_.send({ t: 'c', d: { t: END_TRANSMISSION, d: {} } }); this.tx_ = this.secondaryConn_; this.tryCleanupConnection(); @@ -303,7 +325,7 @@ export class Connection { this.conn_.markConnectionHealthy(); } } - }; + } private onControl_(controlData: { [k: string]: any }) { const cmd: string = requireKey(MESSAGE_TYPE, controlData); @@ -343,7 +365,12 @@ export class Connection { * @param {Object} handshake The handshake data returned from the server * @private */ - private onHandshake_(handshake: { ts: number, v: string, h: string, s: string }) { + private onHandshake_(handshake: { + ts: number; + v: string; + h: string; + s: string; + }) { const timestamp = handshake.ts; const version = handshake.v; const host = handshake.h; @@ -369,11 +396,15 @@ export class Connection { } private startUpgrade_(conn: TransportConstructor) { - this.secondaryConn_ = new conn(this.nextTransportId_(), - this.repoInfo_, this.sessionId); + this.secondaryConn_ = new conn( + this.nextTransportId_(), + this.repoInfo_, + this.sessionId + ); // For certain transports (WebSockets), we need to send and receive several messages back and forth before we // can consider the transport healthy. 
- this.secondaryResponsesRequired_ = conn['responsesRequiredToBeHealthy'] || 0; + this.secondaryResponsesRequired_ = + conn['responsesRequiredToBeHealthy'] || 0; const onMessage = this.connReceiver_(this.secondaryConn_); const onDisconnect = this.disconnReceiver_(this.secondaryConn_); @@ -428,7 +459,7 @@ export class Connection { // If the connection isn't considered healthy yet, we'll send a noop ping packet request. if (!this.isHealthy_ && this.state_ === RealtimeState.CONNECTED) { this.log_('sending ping on primary.'); - this.sendData_({'t': 'c', 'd': {'t': PING, 'd': {}}}); + this.sendData_({ t: 'c', d: { t: PING, d: {} } }); } } @@ -487,7 +518,6 @@ export class Connection { this.close(); } - private sendData_(data: object) { if (this.state_ !== RealtimeState.CONNECTED) { throw 'Connection is not connected'; @@ -535,5 +565,3 @@ export class Connection { } } } - - diff --git a/src/database/realtime/Transport.ts b/src/database/realtime/Transport.ts index a69159b1880..aa532eda460 100644 --- a/src/database/realtime/Transport.ts +++ b/src/database/realtime/Transport.ts @@ -17,7 +17,12 @@ import { RepoInfo } from '../core/RepoInfo'; export interface TransportConstructor { - new(connId: string, repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string): Transport; + new ( + connId: string, + repoInfo: RepoInfo, + transportSessionId?: string, + lastSessionId?: string + ): Transport; isAvailable: () => boolean; responsesRequiredToBeHealthy?: number; healthyTimeout?: number; @@ -50,13 +55,21 @@ export abstract class Transport { * @param {string=} lastSessionId Optional lastSessionId if there was a previous connection * @interface */ - constructor(connId: string, repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string) {} + constructor( + connId: string, + repoInfo: RepoInfo, + transportSessionId?: string, + lastSessionId?: string + ) {} /** * @param {function(Object)} onMessage Callback when messages arrive * @param {function()} onDisconnect 
Callback with connection lost. */ - abstract open(onMessage: (a: Object) => void, onDisconnect: (a?: boolean) => void): void; + abstract open( + onMessage: (a: Object) => void, + onDisconnect: (a?: boolean) => void + ): void; abstract start(): void; @@ -73,5 +86,10 @@ export abstract class Transport { } export interface TransportConstructor { - new(connId: string, RepoInfo, transportSessionId?: string, lastSessionId?: string); + new ( + connId: string, + RepoInfo, + transportSessionId?: string, + lastSessionId?: string + ); } diff --git a/src/database/realtime/TransportManager.ts b/src/database/realtime/TransportManager.ts index bf80ba1af5e..9ebdaeba35d 100644 --- a/src/database/realtime/TransportManager.ts +++ b/src/database/realtime/TransportManager.ts @@ -14,9 +14,9 @@ * limitations under the License. */ -import { BrowserPollConnection } from "./BrowserPollConnection"; -import { WebSocketConnection } from "./WebSocketConnection"; -import { warn, each } from "../core/util/util"; +import { BrowserPollConnection } from './BrowserPollConnection'; +import { WebSocketConnection } from './WebSocketConnection'; +import { warn, each } from '../core/util/util'; import { TransportConstructor } from './Transport'; import { RepoInfo } from '../core/RepoInfo'; @@ -36,10 +36,7 @@ export class TransportManager { * @type {!Array.} */ static get ALL_TRANSPORTS() { - return [ - BrowserPollConnection, - WebSocketConnection - ]; + return [BrowserPollConnection, WebSocketConnection]; } /** @@ -54,12 +51,16 @@ export class TransportManager { * @private */ private initTransports_(repoInfo: RepoInfo) { - const isWebSocketsAvailable: boolean = WebSocketConnection && WebSocketConnection['isAvailable'](); - let isSkipPollConnection = isWebSocketsAvailable && !WebSocketConnection.previouslyFailed(); + const isWebSocketsAvailable: boolean = + WebSocketConnection && WebSocketConnection['isAvailable'](); + let isSkipPollConnection = + isWebSocketsAvailable && 
!WebSocketConnection.previouslyFailed(); if (repoInfo.webSocketOnly) { if (!isWebSocketsAvailable) - warn('wss:// URL used, but browser isn\'t known to support websockets. Trying anyway.'); + warn( + "wss:// URL used, but browser isn't known to support websockets. Trying anyway." + ); isSkipPollConnection = true; } @@ -67,12 +68,15 @@ export class TransportManager { if (isSkipPollConnection) { this.transports_ = [WebSocketConnection]; } else { - const transports = this.transports_ = [] as TransportConstructor[]; - each(TransportManager.ALL_TRANSPORTS, (i: number, transport: TransportConstructor) => { - if (transport && transport['isAvailable']()) { - transports.push(transport); + const transports = (this.transports_ = [] as TransportConstructor[]); + each( + TransportManager.ALL_TRANSPORTS, + (i: number, transport: TransportConstructor) => { + if (transport && transport['isAvailable']()) { + transports.push(transport); + } } - }); + ); } } diff --git a/src/database/realtime/WebSocketConnection.ts b/src/database/realtime/WebSocketConnection.ts index 8f15c142931..c86482600fd 100644 --- a/src/database/realtime/WebSocketConnection.ts +++ b/src/database/realtime/WebSocketConnection.ts @@ -80,11 +80,19 @@ export class WebSocketConnection implements Transport { * session * @param {string=} lastSessionId Optional lastSessionId if there was a previous connection */ - constructor(public connId: string, repoInfo: RepoInfo, - transportSessionId?: string, lastSessionId?: string) { + constructor( + public connId: string, + repoInfo: RepoInfo, + transportSessionId?: string, + lastSessionId?: string + ) { this.log_ = logWrapper(this.connId); this.stats_ = StatsManager.getCollection(repoInfo); - this.connURL = WebSocketConnection.connectionURL_(repoInfo, transportSessionId, lastSessionId); + this.connURL = WebSocketConnection.connectionURL_( + repoInfo, + transportSessionId, + lastSessionId + ); } /** @@ -95,14 +103,20 @@ export class WebSocketConnection implements Transport { * 
@return {string} connection url * @private */ - private static connectionURL_(repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string): string { + private static connectionURL_( + repoInfo: RepoInfo, + transportSessionId?: string, + lastSessionId?: string + ): string { const urlParams: { [k: string]: string } = {}; urlParams[VERSION_PARAM] = PROTOCOL_VERSION; - if (!isNodeSdk() && + if ( + !isNodeSdk() && typeof location !== 'undefined' && location.href && - location.href.indexOf(FORGE_DOMAIN) !== -1) { + location.href.indexOf(FORGE_DOMAIN) !== -1 + ) { urlParams[REFERER_PARAM] = FORGE_REF; } if (transportSessionId) { @@ -134,18 +148,20 @@ export class WebSocketConnection implements Transport { const device = ENV_CONSTANTS.NODE_ADMIN ? 'AdminNode' : 'Node'; // UA Format: Firebase//// const options: { [k: string]: object } = { - 'headers': { + headers: { 'User-Agent': `Firebase/${PROTOCOL_VERSION}/${firebase.SDK_VERSION}/${process.platform}/${device}` - }}; + } + }; // Plumb appropriate http_proxy environment variable into faye-websocket if it exists. const env = process['env']; - const proxy = (this.connURL.indexOf('wss://') == 0) - ? (env['HTTPS_PROXY'] || env['https_proxy']) - : (env['HTTP_PROXY'] || env['http_proxy']); + const proxy = + this.connURL.indexOf('wss://') == 0 + ? 
env['HTTPS_PROXY'] || env['https_proxy'] + : env['HTTP_PROXY'] || env['http_proxy']; if (proxy) { - options['proxy'] = {origin: proxy}; + options['proxy'] = { origin: proxy }; } this.mySock = new WebSocketImpl(this.connURL, [], options); @@ -190,7 +206,7 @@ export class WebSocketConnection implements Transport { /** * No-op for websockets, we don't need to do anything once the connection is confirmed as open */ - start() {}; + start() {} static forceDisallow_: Boolean; @@ -210,7 +226,11 @@ export class WebSocketConnection implements Transport { } } - return !isOldAndroid && WebSocketImpl !== null && !WebSocketConnection.forceDisallow_; + return ( + !isOldAndroid && + WebSocketImpl !== null && + !WebSocketConnection.forceDisallow_ + ); } /** @@ -232,8 +252,10 @@ export class WebSocketConnection implements Transport { static previouslyFailed(): boolean { // If our persistent storage is actually only in-memory storage, // we default to assuming that it previously failed to be safe. - return PersistentStorage.isInMemoryStorage || - PersistentStorage.get('previous_websocket_failure') === true; + return ( + PersistentStorage.isInMemoryStorage || + PersistentStorage.get('previous_websocket_failure') === true + ); } markConnectionHealthy() { @@ -287,8 +309,7 @@ export class WebSocketConnection implements Transport { * @param mess The frame data */ handleIncomingFrame(mess: { [k: string]: any }) { - if (this.mySock === null) - return; // Chrome apparently delivers incoming packets even after we .close() the connection sometimes. + if (this.mySock === null) return; // Chrome apparently delivers incoming packets even after we .close() the connection sometimes. 
const data = mess['data'] as string; this.bytesReceived += data.length; this.stats_.incrementCounter('bytes_received', data.length); @@ -312,7 +333,6 @@ export class WebSocketConnection implements Transport { * @param {Object} data The JSON object to transmit */ send(data: Object) { - this.resetKeepAlive(); const dataStr = stringify(data); @@ -400,10 +420,12 @@ export class WebSocketConnection implements Transport { try { this.mySock.send(str); } catch (e) { - this.log_('Exception thrown from WebSocket.send():', e.message || e.data, 'Closing connection.'); + this.log_( + 'Exception thrown from WebSocket.send():', + e.message || e.data, + 'Closing connection.' + ); setTimeout(this.onClosed_.bind(this), 0); } } } - - diff --git a/src/database/realtime/polling/PacketReceiver.ts b/src/database/realtime/polling/PacketReceiver.ts index 4b66858face..af04f59ad54 100644 --- a/src/database/realtime/polling/PacketReceiver.ts +++ b/src/database/realtime/polling/PacketReceiver.ts @@ -30,8 +30,7 @@ export class PacketReceiver { /** * @param onMessage_ */ - constructor(private onMessage_: (a: Object) => void) { - } + constructor(private onMessage_: (a: Object) => void) {} closeAfter(responseNum: number, callback: () => void) { this.closeAfterResponse = responseNum; @@ -72,4 +71,3 @@ export class PacketReceiver { } } } - diff --git a/src/firebase-browser.ts b/src/firebase-browser.ts index ae744edee2b..b78f178955f 100644 --- a/src/firebase-browser.ts +++ b/src/firebase-browser.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import firebase from "./app"; +import firebase from './app'; import './auth'; import './database'; import './storage'; diff --git a/src/firebase-node.ts b/src/firebase-node.ts index c7150a533a0..5a6d57cb2a2 100644 --- a/src/firebase-node.ts +++ b/src/firebase-node.ts @@ -14,21 +14,20 @@ * limitations under the License. 
*/ -import firebase from "./app"; +import firebase from './app'; import './auth'; import './database'; import './utils/nodePatches'; - var Storage = require('dom-storage'); -var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest; +var XMLHttpRequest = require('xmlhttprequest').XMLHttpRequest; firebase.INTERNAL.extendNamespace({ - 'INTERNAL': { - 'node': { - 'localStorage': new Storage(null, { strict: true }), - 'sessionStorage': new Storage(null, { strict: true }), - 'XMLHttpRequest': XMLHttpRequest + INTERNAL: { + node: { + localStorage: new Storage(null, { strict: true }), + sessionStorage: new Storage(null, { strict: true }), + XMLHttpRequest: XMLHttpRequest } } }); diff --git a/src/firebase-react-native.ts b/src/firebase-react-native.ts index 0e3092ee86a..528366af512 100644 --- a/src/firebase-react-native.ts +++ b/src/firebase-react-native.ts @@ -14,16 +14,16 @@ * limitations under the License. */ -import firebase from "./app"; +import firebase from './app'; import './auth'; import './database'; import './storage'; var AsyncStorage = require('react-native').AsyncStorage; firebase.INTERNAL.extendNamespace({ - 'INTERNAL': { - 'reactNative': { - 'AsyncStorage': AsyncStorage + INTERNAL: { + reactNative: { + AsyncStorage: AsyncStorage } } }); diff --git a/src/messaging.ts b/src/messaging.ts index 72f02a45d2f..4fda2b98f4d 100644 --- a/src/messaging.ts +++ b/src/messaging.ts @@ -32,10 +32,14 @@ export function registerMessaging(instance) { const namespaceExports = { // no-inline - 'Messaging': WindowController + Messaging: WindowController }; - instance.INTERNAL.registerService(messagingName, factoryMethod, namespaceExports); + instance.INTERNAL.registerService( + messagingName, + factoryMethod, + namespaceExports + ); } -registerMessaging(firebase); \ No newline at end of file +registerMessaging(firebase); diff --git a/src/messaging/controllers/controller-interface.ts b/src/messaging/controllers/controller-interface.ts index c221d604ec8..fdbd4847209 100644 --- 
a/src/messaging/controllers/controller-interface.ts +++ b/src/messaging/controllers/controller-interface.ts @@ -15,7 +15,7 @@ */ 'use strict'; -import {ErrorFactory} from '../../app/errors'; +import { ErrorFactory } from '../../app/errors'; import Errors from '../models/errors'; import TokenManager from '../models/token-manager'; import NOTIFICATION_PERMISSION from '../models/notification-permission'; @@ -23,7 +23,6 @@ import NOTIFICATION_PERMISSION from '../models/notification-permission'; const SENDER_ID_OPTION_NAME = 'messagingSenderId'; export default class ControllerInterface { - public app; public INTERNAL; protected errorFactory_; @@ -37,8 +36,10 @@ export default class ControllerInterface { constructor(app) { this.errorFactory_ = new ErrorFactory('messaging', 'Messaging', Errors.map); - if (!app.options[SENDER_ID_OPTION_NAME] || - typeof app.options[SENDER_ID_OPTION_NAME] !== 'string') { + if ( + !app.options[SENDER_ID_OPTION_NAME] || + typeof app.options[SENDER_ID_OPTION_NAME] !== 'string' + ) { throw this.errorFactory_.create(Errors.codes.BAD_SENDER_ID); } @@ -62,7 +63,7 @@ export default class ControllerInterface { if (currentPermission !== NOTIFICATION_PERMISSION.granted) { if (currentPermission === NOTIFICATION_PERMISSION.denied) { return Promise.reject( - this.errorFactory_.create(Errors.codes.NOTIFICATIONS_BLOCKED) + this.errorFactory_.create(Errors.codes.NOTIFICATIONS_BLOCKED) ); } @@ -70,19 +71,20 @@ export default class ControllerInterface { return Promise.resolve(null); } - return this.getSWRegistration_() - .then(registration => { - return this.tokenManager_.getSavedToken( - this.messagingSenderId_, registration) - .then(token => { - if (token) { - return token; - } - - return this.tokenManager_.createToken(this.messagingSenderId_, - registration); - }); - }); + return this.getSWRegistration_().then(registration => { + return this.tokenManager_ + .getSavedToken(this.messagingSenderId_, registration) + .then(token => { + if (token) { + return 
token; + } + + return this.tokenManager_.createToken( + this.messagingSenderId_, + registration + ); + }); + }); } /** @@ -93,10 +95,9 @@ export default class ControllerInterface { * @return {Promise} */ deleteToken(token) { - return this.tokenManager_.deleteToken(token) - .then(() => { - return this.getSWRegistration_() - .then((registration) => { + return this.tokenManager_.deleteToken(token).then(() => { + return this.getSWRegistration_() + .then(registration => { if (registration) { return registration.pushManager.getSubscription(); } @@ -106,10 +107,10 @@ export default class ControllerInterface { return subscription.unsubscribe(); } }); - }); + }); } - getSWRegistration_(): Promise { + getSWRegistration_(): Promise { throw this.errorFactory_.create(Errors.codes.SHOULD_BE_INHERITED); } diff --git a/src/messaging/controllers/sw-controller.ts b/src/messaging/controllers/sw-controller.ts index d9330190955..66b08911d2e 100644 --- a/src/messaging/controllers/sw-controller.ts +++ b/src/messaging/controllers/sw-controller.ts @@ -23,7 +23,6 @@ import FCMDetails from '../models/fcm-details'; const FCM_MSG = 'FCM_MSG'; export default class SWController extends ControllerInterface { - private bgMessageHandler_: (input: Object) => Promise; constructor(app) { @@ -31,9 +30,15 @@ export default class SWController extends ControllerInterface { self.addEventListener('push', e => this.onPush_(e), false); self.addEventListener( - 'pushsubscriptionchange', e => this.onSubChange_(e), false); + 'pushsubscriptionchange', + e => this.onSubChange_(e), + false + ); self.addEventListener( - 'notificationclick', e => this.onNotificationClick_(e), false); + 'notificationclick', + e => this.onNotificationClick_(e), + false + ); /** * @private @@ -64,26 +69,29 @@ export default class SWController extends ControllerInterface { return; } - const handleMsgPromise = this.hasVisibleClients_() - .then(hasVisibleClients => { - if (hasVisibleClients) { - // Do not need to show a notification. 
- if (msgPayload.notification || this.bgMessageHandler_) { - // Send to page - return this.sendMessageToWindowClients_(msgPayload); + const handleMsgPromise = this.hasVisibleClients_().then( + hasVisibleClients => { + if (hasVisibleClients) { + // Do not need to show a notification. + if (msgPayload.notification || this.bgMessageHandler_) { + // Send to page + return this.sendMessageToWindowClients_(msgPayload); + } + return; } - return; - } - const notificationDetails = this.getNotificationData_(msgPayload); - if (notificationDetails) { - const notificationTitle = notificationDetails.title || ''; - return (self as any).registration - .showNotification(notificationTitle, notificationDetails); - } else if (this.bgMessageHandler_) { - return this.bgMessageHandler_(msgPayload); + const notificationDetails = this.getNotificationData_(msgPayload); + if (notificationDetails) { + const notificationTitle = notificationDetails.title || ''; + return (self as any).registration.showNotification( + notificationTitle, + notificationDetails + ); + } else if (this.bgMessageHandler_) { + return this.bgMessageHandler_(msgPayload); + } } - }); + ); event.waitUntil(handleMsgPromise); } @@ -92,17 +100,18 @@ export default class SWController extends ControllerInterface { * @private */ onSubChange_(event) { - const promiseChain = this.getToken() - .then(token => { - if (!token) { - // We can't resubscribe if we don't have an FCM token for this scope. - throw this.errorFactory_.create( - Errors.codes.NO_FCM_TOKEN_FOR_RESUBSCRIBE); - } + const promiseChain = this.getToken().then(token => { + if (!token) { + // We can't resubscribe if we don't have an FCM token for this scope. 
+ throw this.errorFactory_.create( + Errors.codes.NO_FCM_TOKEN_FOR_RESUBSCRIBE + ); + } - let tokenDetails = null; - const tokenManager = this.getTokenManager(); - return tokenManager.getTokenDetailsFromToken(token) + let tokenDetails = null; + const tokenManager = this.getTokenManager(); + return tokenManager + .getTokenDetailsFromToken(token) .then(details => { tokenDetails = details; if (!tokenDetails) { @@ -110,7 +119,9 @@ export default class SWController extends ControllerInterface { } // Attempt to get a new subscription - return (self as any).registration.pushManager.subscribe(FCMDetails.SUBSCRIPTION_OPTIONS); + return (self as any).registration.pushManager.subscribe( + FCMDetails.SUBSCRIPTION_OPTIONS + ); }) .then(newSubscription => { // Send new subscription to FCM. @@ -123,16 +134,16 @@ export default class SWController extends ControllerInterface { .catch(err => { // The best thing we can do is log this to the terminal so // developers might notice the error. - return tokenManager.deleteToken(tokenDetails.fcmToken) - .then(() => { + return tokenManager.deleteToken(tokenDetails.fcmToken).then(() => { throw this.errorFactory_.create( - Errors.codes.UNABLE_TO_RESUBSCRIBE, { - 'message': err + Errors.codes.UNABLE_TO_RESUBSCRIBE, + { + message: err } ); }); }); - }); + }); event.waitUntil(promiseChain); } @@ -141,8 +152,13 @@ export default class SWController extends ControllerInterface { * @private */ onNotificationClick_(event) { - if (!(event.notification && event.notification.data && - event.notification.data[FCM_MSG])) { + if ( + !( + event.notification && + event.notification.data && + event.notification.data[FCM_MSG] + ) + ) { // Not an FCM notification, do nothing. return; } @@ -160,30 +176,31 @@ export default class SWController extends ControllerInterface { } const promiseChain = this.getWindowClient_(clickAction) - .then(windowClient => { - if (!windowClient) { - // Unable to find window client so need to open one. 
- return (self as any).clients.openWindow(clickAction); - } - return windowClient; - }) - .then(windowClient => { - if (!windowClient) { - // Window Client will not be returned if it's for a third party origin. - return; - } + .then(windowClient => { + if (!windowClient) { + // Unable to find window client so need to open one. + return (self as any).clients.openWindow(clickAction); + } + return windowClient; + }) + .then(windowClient => { + if (!windowClient) { + // Window Client will not be returned if it's for a third party origin. + return; + } - // Delete notification data from payload before sending to the page. - const notificationData = msgPayload['notification']; - delete msgPayload['notification']; + // Delete notification data from payload before sending to the page. + const notificationData = msgPayload['notification']; + delete msgPayload['notification']; - const internalMsg = WorkerPageMessage.createNewMsg( + const internalMsg = WorkerPageMessage.createNewMsg( WorkerPageMessage.TYPES_OF_MSG.NOTIFICATION_CLICKED, - msgPayload); - // Attempt to send a message to the client to handle the data - // Is affected by: https://github.com/slightlyoff/ServiceWorker/issues/728 - return this.attemptToMessageClient_(windowClient, internalMsg); - }); + msgPayload + ); + // Attempt to send a message to the client to handle the data + // Is affected by: https://github.com/slightlyoff/ServiceWorker/issues/728 + return this.attemptToMessageClient_(windowClient, internalMsg); + }); event.waitUntil(promiseChain); } @@ -232,7 +249,8 @@ export default class SWController extends ControllerInterface { setBackgroundMessageHandler(callback) { if (callback && typeof callback !== 'function') { throw this.errorFactory_.create( - Errors.codes.BG_HANDLER_FUNCTION_EXPECTED); + Errors.codes.BG_HANDLER_FUNCTION_EXPECTED + ); } this.bgMessageHandler_ = callback; @@ -249,25 +267,26 @@ export default class SWController extends ControllerInterface { // This at least handles whether to 
include trailing slashes or not const parsedURL = new URL(url).href; - return (self as any).clients.matchAll({ - type: 'window', - includeUncontrolled: true - }) - .then(clientList => { - let suitableClient = null; - for (let i = 0; i < clientList.length; i++) { - const parsedClientUrl = new URL(clientList[i].url).href; - if (parsedClientUrl === parsedURL) { - suitableClient = clientList[i]; - break; + return (self as any).clients + .matchAll({ + type: 'window', + includeUncontrolled: true + }) + .then(clientList => { + let suitableClient = null; + for (let i = 0; i < clientList.length; i++) { + const parsedClientUrl = new URL(clientList[i].url).href; + if (parsedClientUrl === parsedURL) { + suitableClient = clientList[i]; + break; + } } - } - if (suitableClient) { - suitableClient.focus(); - return suitableClient; - } - }); + if (suitableClient) { + suitableClient.focus(); + return suitableClient; + } + }); } /** @@ -282,8 +301,9 @@ export default class SWController extends ControllerInterface { attemptToMessageClient_(client, message) { return new Promise((resolve, reject) => { if (!client) { - return reject(this.errorFactory_.create( - Errors.codes.NO_WINDOW_CLIENT_TO_MSG)); + return reject( + this.errorFactory_.create(Errors.codes.NO_WINDOW_CLIENT_TO_MSG) + ); } client.postMessage(message); @@ -297,13 +317,14 @@ export default class SWController extends ControllerInterface { * this method will resolve to true, otherwise false. 
*/ hasVisibleClients_() { - return (self as any).clients.matchAll({ - type: 'window', - includeUncontrolled: true - }) - .then(clientList => { - return clientList.some(client => client.visibilityState === 'visible'); - }); + return (self as any).clients + .matchAll({ + type: 'window', + includeUncontrolled: true + }) + .then(clientList => { + return clientList.some(client => client.visibilityState === 'visible'); + }); } /** @@ -314,21 +335,23 @@ export default class SWController extends ControllerInterface { * has been sent to all WindowClients. */ sendMessageToWindowClients_(msgPayload) { - return (self as any).clients.matchAll({ - type: 'window', - includeUncontrolled: true - }) - .then(clientList => { - const internalMsg = WorkerPageMessage.createNewMsg( - WorkerPageMessage.TYPES_OF_MSG.PUSH_MSG_RECEIVED, - msgPayload); - - return Promise.all( - clientList.map(client => { - return this.attemptToMessageClient_(client, internalMsg); - }) - ); - }); + return (self as any).clients + .matchAll({ + type: 'window', + includeUncontrolled: true + }) + .then(clientList => { + const internalMsg = WorkerPageMessage.createNewMsg( + WorkerPageMessage.TYPES_OF_MSG.PUSH_MSG_RECEIVED, + msgPayload + ); + + return Promise.all( + clientList.map(client => { + return this.attemptToMessageClient_(client, internalMsg); + }) + ); + }); } /** diff --git a/src/messaging/controllers/window-controller.ts b/src/messaging/controllers/window-controller.ts index fa2721ebb8c..4e0075ff2a0 100644 --- a/src/messaging/controllers/window-controller.ts +++ b/src/messaging/controllers/window-controller.ts @@ -20,12 +20,11 @@ import Errors from '../models/errors'; import WorkerPageMessage from '../models/worker-page-message'; import DefaultSW from '../models/default-sw'; import NOTIFICATION_PERMISSION from '../models/notification-permission'; -import {createSubscribe} from '../../app/subscribe'; +import { createSubscribe } from '../../app/subscribe'; declare const firebase: any; export default class 
WindowController extends ControllerInterface { - private registrationToUse_; private manifestCheckPromise_; private messageObserver_; @@ -94,8 +93,7 @@ export default class WindowController extends ControllerInterface { ); } - return this.manifestCheck_() - .then(() => { + return this.manifestCheck_().then(() => { return super.getToken(); }); } @@ -112,33 +110,36 @@ export default class WindowController extends ControllerInterface { return this.manifestCheckPromise_; } - const manifestTag = ( document.querySelector('link[rel="manifest"]')); + const manifestTag = document.querySelector( + 'link[rel="manifest"]' + ); if (!manifestTag) { this.manifestCheckPromise_ = Promise.resolve(); } else { this.manifestCheckPromise_ = fetch(manifestTag.href) - .then(response => { - return response.json(); - }) - .catch(() => { - // If the download or parsing fails allow check. - // We only want to error if we KNOW that the gcm_sender_id is incorrect. - return Promise.resolve(); - }) - .then(manifestContent => { - if (!manifestContent) { - return; - } - - if (!manifestContent['gcm_sender_id']) { - return; - } - - if (manifestContent['gcm_sender_id'] !== '103953800507') { - throw this.errorFactory_.create( - Errors.codes.INCORRECT_GCM_SENDER_ID); - } - }); + .then(response => { + return response.json(); + }) + .catch(() => { + // If the download or parsing fails allow check. + // We only want to error if we KNOW that the gcm_sender_id is incorrect. 
+ return Promise.resolve(); + }) + .then(manifestContent => { + if (!manifestContent) { + return; + } + + if (!manifestContent['gcm_sender_id']) { + return; + } + + if (manifestContent['gcm_sender_id'] !== '103953800507') { + throw this.errorFactory_.create( + Errors.codes.INCORRECT_GCM_SENDER_ID + ); + } + }); } return this.manifestCheckPromise_; @@ -160,11 +161,13 @@ export default class WindowController extends ControllerInterface { if (result === NOTIFICATION_PERMISSION.granted) { return resolve(); } else if (result === NOTIFICATION_PERMISSION.denied) { - return reject(this.errorFactory_.create( - Errors.codes.PERMISSION_BLOCKED)); + return reject( + this.errorFactory_.create(Errors.codes.PERMISSION_BLOCKED) + ); } else { - return reject(this.errorFactory_.create( - Errors.codes.PERMISSION_DEFAULT)); + return reject( + this.errorFactory_.create(Errors.codes.PERMISSION_DEFAULT) + ); } }; @@ -244,8 +247,8 @@ export default class WindowController extends ControllerInterface { * registration to become active */ waitForRegistrationToActivate_(registration) { - const serviceWorker = registration.installing || registration.waiting || - registration.active; + const serviceWorker = + registration.installing || registration.waiting || registration.active; return new Promise((resolve, reject) => { if (!serviceWorker) { @@ -295,29 +298,30 @@ export default class WindowController extends ControllerInterface { // use a new service worker as registrationToUse_ is no longer undefined this.registrationToUse_ = null; - return navigator.serviceWorker.register(DefaultSW.path, { - scope: DefaultSW.scope - }) - .catch(err => { - throw this.errorFactory_.create( - Errors.codes.FAILED_DEFAULT_REGISTRATION, { - 'browserErrorMessage': err.message - } - ); - }) - .then(registration => { - return this.waitForRegistrationToActivate_(registration) - .then(() => { - this.registrationToUse_ = registration; - - // We update after activation due to an issue with Firefox v49 where - // a race 
condition occassionally causes the service work to not - // install - registration.update(); - - return registration; + return navigator.serviceWorker + .register(DefaultSW.path, { + scope: DefaultSW.scope + }) + .catch(err => { + throw this.errorFactory_.create( + Errors.codes.FAILED_DEFAULT_REGISTRATION, + { + browserErrorMessage: err.message + } + ); + }) + .then(registration => { + return this.waitForRegistrationToActivate_(registration).then(() => { + this.registrationToUse_ = registration; + + // We update after activation due to an issue with Firefox v49 where + // a race condition occassionally causes the service work to not + // install + registration.update(); + + return registration; + }); }); - }); } /** @@ -332,24 +336,29 @@ export default class WindowController extends ControllerInterface { return; } - navigator.serviceWorker.addEventListener('message', event => { - if (!event.data || !event.data[WorkerPageMessage.PARAMS.TYPE_OF_MSG]) { - // Not a message from FCM - return; - } + navigator.serviceWorker.addEventListener( + 'message', + event => { + if (!event.data || !event.data[WorkerPageMessage.PARAMS.TYPE_OF_MSG]) { + // Not a message from FCM + return; + } - const workerPageMessage = event.data; - switch (workerPageMessage[WorkerPageMessage.PARAMS.TYPE_OF_MSG]) { - case WorkerPageMessage.TYPES_OF_MSG.PUSH_MSG_RECEIVED: - case WorkerPageMessage.TYPES_OF_MSG.NOTIFICATION_CLICKED: - const pushMessage = workerPageMessage[WorkerPageMessage.PARAMS.DATA]; - this.messageObserver_.next(pushMessage); - break; - default: - // Noop. - break; - } - }, false); + const workerPageMessage = event.data; + switch (workerPageMessage[WorkerPageMessage.PARAMS.TYPE_OF_MSG]) { + case WorkerPageMessage.TYPES_OF_MSG.PUSH_MSG_RECEIVED: + case WorkerPageMessage.TYPES_OF_MSG.NOTIFICATION_CLICKED: + const pushMessage = + workerPageMessage[WorkerPageMessage.PARAMS.DATA]; + this.messageObserver_.next(pushMessage); + break; + default: + // Noop. 
+ break; + } + }, + false + ); } /** @@ -358,12 +367,13 @@ export default class WindowController extends ControllerInterface { * @return {boolean} Returns true if the desired APIs are available. */ isSupported_() { - return 'serviceWorker' in navigator && - 'PushManager' in window && - 'Notification' in window && - 'fetch' in window && - ServiceWorkerRegistration.prototype - .hasOwnProperty('showNotification') && - PushSubscription.prototype.hasOwnProperty('getKey'); + return ( + 'serviceWorker' in navigator && + 'PushManager' in window && + 'Notification' in window && + 'fetch' in window && + ServiceWorkerRegistration.prototype.hasOwnProperty('showNotification') && + PushSubscription.prototype.hasOwnProperty('getKey') + ); } } diff --git a/src/messaging/helpers/array-buffer-to-base64.ts b/src/messaging/helpers/array-buffer-to-base64.ts index 2f518b9165b..72e18269c94 100644 --- a/src/messaging/helpers/array-buffer-to-base64.ts +++ b/src/messaging/helpers/array-buffer-to-base64.ts @@ -18,9 +18,7 @@ function toBase64(arrayBuffer) { return window.btoa(String.fromCharCode.apply(null, uint8Version)); } -export default (arrayBuffer) => { +export default arrayBuffer => { const base64String = toBase64(arrayBuffer); - return base64String.replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); + return base64String.replace(/=/g, '').replace(/\+/g, '-').replace(/\//g, '_'); }; diff --git a/src/messaging/models/db-interface.ts b/src/messaging/models/db-interface.ts index 58044514393..8788fec8ffb 100644 --- a/src/messaging/models/db-interface.ts +++ b/src/messaging/models/db-interface.ts @@ -15,12 +15,11 @@ */ 'use strict'; -import {ErrorFactory} from '../../app/errors'; +import { ErrorFactory } from '../../app/errors'; import Errors from './errors'; export default class DBInterface { - private dbName_: string; private dbVersion_: number; private openDbPromise_: Promise; @@ -51,14 +50,14 @@ export default class DBInterface { this.openDbPromise_ = new Promise((resolve, 
reject) => { const request = indexedDB.open(this.dbName_, this.dbVersion_); - request.onerror = (event) => { - reject(( event.target).error); + request.onerror = event => { + reject((event.target).error); }; request.onsuccess = event => { - resolve(( event.target).result); + resolve((event.target).result); }; request.onupgradeneeded = event => { - var db = ( event.target).result; + var db = (event.target).result; this.onDBUpgrade(db); }; }); @@ -71,15 +70,14 @@ export default class DBInterface { * @return {!Promise} Returns the result of the promise chain. */ closeDatabase() { - return Promise.resolve() - .then(() => { - if (this.openDbPromise_) { - return this.openDbPromise_.then(db => { - db.close(); - this.openDbPromise_ = null; - }); - } - }); + return Promise.resolve().then(() => { + if (this.openDbPromise_) { + return this.openDbPromise_.then(db => { + db.close(); + this.openDbPromise_ = null; + }); + } + }); } /** diff --git a/src/messaging/models/default-sw.ts b/src/messaging/models/default-sw.ts index f7db4f2ca67..c94de1899b4 100644 --- a/src/messaging/models/default-sw.ts +++ b/src/messaging/models/default-sw.ts @@ -18,4 +18,4 @@ export default { path: '/firebase-messaging-sw.js', scope: '/firebase-cloud-messaging-push-scope' -}; \ No newline at end of file +}; diff --git a/src/messaging/models/errors.ts b/src/messaging/models/errors.ts index 2885b23aece..e05f735e90c 100644 --- a/src/messaging/models/errors.ts +++ b/src/messaging/models/errors.ts @@ -53,66 +53,81 @@ const CODES = { const ERROR_MAP = { [CODES.AVAILABLE_IN_WINDOW]: 'This method is available in a Window context.', - [CODES.AVAILABLE_IN_SW]: 'This method is available in a service worker ' + - 'context.', - [CODES.SHOULD_BE_INHERITED]: 'This method should be overriden by ' + - 'extended classes.', - [CODES.BAD_SENDER_ID]: 'Please ensure that \'messagingSenderId\' is set ' + + [CODES.AVAILABLE_IN_SW]: + 'This method is available in a service worker ' + 'context.', + [CODES.SHOULD_BE_INHERITED]: 
+ 'This method should be overriden by ' + 'extended classes.', + [CODES.BAD_SENDER_ID]: + "Please ensure that 'messagingSenderId' is set " + 'correctly in the options passed into firebase.initializeApp().', - [CODES.PERMISSION_DEFAULT]: 'The required permissions were not granted and ' + - 'dismissed instead.', - [CODES.PERMISSION_BLOCKED]: 'The required permissions were not granted and ' + - 'blocked instead.', - [CODES.UNSUPPORTED_BROWSER]: 'This browser doesn\'t support the API\'s ' + + [CODES.PERMISSION_DEFAULT]: + 'The required permissions were not granted and ' + 'dismissed instead.', + [CODES.PERMISSION_BLOCKED]: + 'The required permissions were not granted and ' + 'blocked instead.', + [CODES.UNSUPPORTED_BROWSER]: + "This browser doesn't support the API's " + 'required to use the firebase SDK.', [CODES.NOTIFICATIONS_BLOCKED]: 'Notifications have been blocked.', - [CODES.FAILED_DEFAULT_REGISTRATION]: 'We are unable to register the ' + + [CODES.FAILED_DEFAULT_REGISTRATION]: + 'We are unable to register the ' + 'default service worker. 
{$browserErrorMessage}', - [CODES.SW_REGISTRATION_EXPECTED]: 'A service worker registration was the ' + - 'expected input.', - [CODES.GET_SUBSCRIPTION_FAILED]: 'There was an error when trying to get ' + + [CODES.SW_REGISTRATION_EXPECTED]: + 'A service worker registration was the ' + 'expected input.', + [CODES.GET_SUBSCRIPTION_FAILED]: + 'There was an error when trying to get ' + 'any existing Push Subscriptions.', [CODES.INVALID_SAVED_TOKEN]: 'Unable to access details of the saved token.', - [CODES.SW_REG_REDUNDANT]: 'The service worker being used for push was made ' + - 'redundant.', - [CODES.TOKEN_SUBSCRIBE_FAILED]: 'A problem occured while subscribing the ' + - 'user to FCM: {$message}', - [CODES.TOKEN_SUBSCRIBE_NO_TOKEN]: 'FCM returned no token when subscribing ' + - 'the user to push.', - [CODES.TOKEN_SUBSCRIBE_NO_PUSH_SET]: 'FCM returned an invalid response ' + - 'when getting an FCM token.', - [CODES.USE_SW_BEFORE_GET_TOKEN]: 'You must call useServiceWorker() before ' + + [CODES.SW_REG_REDUNDANT]: + 'The service worker being used for push was made ' + 'redundant.', + [CODES.TOKEN_SUBSCRIBE_FAILED]: + 'A problem occured while subscribing the ' + 'user to FCM: {$message}', + [CODES.TOKEN_SUBSCRIBE_NO_TOKEN]: + 'FCM returned no token when subscribing ' + 'the user to push.', + [CODES.TOKEN_SUBSCRIBE_NO_PUSH_SET]: + 'FCM returned an invalid response ' + 'when getting an FCM token.', + [CODES.USE_SW_BEFORE_GET_TOKEN]: + 'You must call useServiceWorker() before ' + 'calling getToken() to ensure your service worker is used.', - [CODES.INVALID_DELETE_TOKEN]: 'You must pass a valid token into ' + + [CODES.INVALID_DELETE_TOKEN]: + 'You must pass a valid token into ' + 'deleteToken(), i.e. 
the token from getToken().', - [CODES.DELETE_TOKEN_NOT_FOUND]: 'The deletion attempt for token could not ' + + [CODES.DELETE_TOKEN_NOT_FOUND]: + 'The deletion attempt for token could not ' + 'be performed as the token was not found.', - [CODES.DELETE_SCOPE_NOT_FOUND]: 'The deletion attempt for service worker ' + + [CODES.DELETE_SCOPE_NOT_FOUND]: + 'The deletion attempt for service worker ' + 'scope could not be performed as the scope was not found.', - [CODES.BG_HANDLER_FUNCTION_EXPECTED]: 'The input to ' + - 'setBackgroundMessageHandler() must be a function.', - [CODES.NO_WINDOW_CLIENT_TO_MSG]: 'An attempt was made to message a ' + - 'non-existant window client.', - [CODES.UNABLE_TO_RESUBSCRIBE]: 'There was an error while re-subscribing ' + + [CODES.BG_HANDLER_FUNCTION_EXPECTED]: + 'The input to ' + 'setBackgroundMessageHandler() must be a function.', + [CODES.NO_WINDOW_CLIENT_TO_MSG]: + 'An attempt was made to message a ' + 'non-existant window client.', + [CODES.UNABLE_TO_RESUBSCRIBE]: + 'There was an error while re-subscribing ' + 'the FCM token for push messaging. Will have to resubscribe the ' + 'user on next visit. {$message}', - [CODES.NO_FCM_TOKEN_FOR_RESUBSCRIBE]: 'Could not find an FCM token ' + + [CODES.NO_FCM_TOKEN_FOR_RESUBSCRIBE]: + 'Could not find an FCM token ' + 'and as a result, unable to resubscribe. 
Will have to resubscribe the ' + 'user on next visit.', [CODES.FAILED_TO_DELETE_TOKEN]: 'Unable to delete the currently saved token.', - [CODES.NO_SW_IN_REG]: 'Even though the service worker registration was ' + + [CODES.NO_SW_IN_REG]: + 'Even though the service worker registration was ' + 'successful, there was a problem accessing the service worker itself.', - [CODES.INCORRECT_GCM_SENDER_ID]: 'Please change your web app manifest\'s ' + - '\'gcm_sender_id\' value to \'103953800507\' to use Firebase messaging.', - [CODES.BAD_SCOPE]: 'The service worker scope must be a string with at ' + + [CODES.INCORRECT_GCM_SENDER_ID]: + "Please change your web app manifest's " + + "'gcm_sender_id' value to '103953800507' to use Firebase messaging.", + [CODES.BAD_SCOPE]: + 'The service worker scope must be a string with at ' + 'least one character.', - [CODES.BAD_VAPID_KEY]: 'The public VAPID key must be a string with at ' + - 'least one character.', - [CODES.BAD_SUBSCRIPTION]: 'The subscription must be a valid ' + - 'PushSubscription.', - [CODES.BAD_TOKEN]: 'The FCM Token used for storage / lookup was not ' + + [CODES.BAD_VAPID_KEY]: + 'The public VAPID key must be a string with at ' + 'least one character.', + [CODES.BAD_SUBSCRIPTION]: + 'The subscription must be a valid ' + 'PushSubscription.', + [CODES.BAD_TOKEN]: + 'The FCM Token used for storage / lookup was not ' + 'a valid token string.', - [CODES.BAD_PUSH_SET]: 'The FCM push set used for storage / lookup was not ' + + [CODES.BAD_PUSH_SET]: + 'The FCM push set used for storage / lookup was not ' + 'not a valid push set string.', [CODES.FAILED_DELETE_VAPID_KEY]: 'The VAPID key could not be deleted.' 
}; diff --git a/src/messaging/models/fcm-details.ts b/src/messaging/models/fcm-details.ts index e64b316decc..d4247135e9d 100644 --- a/src/messaging/models/fcm-details.ts +++ b/src/messaging/models/fcm-details.ts @@ -15,21 +15,81 @@ */ 'use strict'; -const FCM_APPLICATION_SERVER_KEY = [0x04, 0x33, 0x94, 0xF7, 0xDF, - 0xA1, 0xEB, 0xB1, 0xDC, 0x03, 0xA2, 0x5E, 0x15, 0x71, 0xDB, 0x48, - 0xD3, 0x2E, 0xED, 0xED, 0xB2, 0x34, 0xDB, 0xB7, 0x47, 0x3A, 0x0C, - 0x8F, 0xC4, 0xCC, 0xE1, 0x6F, 0x3C, 0x8C, 0x84, 0xDF, 0xAB, 0xB6, - 0x66, 0x3E, 0xF2, 0x0C, 0xD4, 0x8B, 0xFE, 0xE3, 0xF9, 0x76, 0x2F, - 0x14, 0x1C, 0x63, 0x08, 0x6A, 0x6F, 0x2D, 0xB1, 0x1A, 0x95, 0xB0, - 0xCE, 0x37, 0xC0, 0x9C, 0x6E]; +const FCM_APPLICATION_SERVER_KEY = [ + 0x04, + 0x33, + 0x94, + 0xf7, + 0xdf, + 0xa1, + 0xeb, + 0xb1, + 0xdc, + 0x03, + 0xa2, + 0x5e, + 0x15, + 0x71, + 0xdb, + 0x48, + 0xd3, + 0x2e, + 0xed, + 0xed, + 0xb2, + 0x34, + 0xdb, + 0xb7, + 0x47, + 0x3a, + 0x0c, + 0x8f, + 0xc4, + 0xcc, + 0xe1, + 0x6f, + 0x3c, + 0x8c, + 0x84, + 0xdf, + 0xab, + 0xb6, + 0x66, + 0x3e, + 0xf2, + 0x0c, + 0xd4, + 0x8b, + 0xfe, + 0xe3, + 0xf9, + 0x76, + 0x2f, + 0x14, + 0x1c, + 0x63, + 0x08, + 0x6a, + 0x6f, + 0x2d, + 0xb1, + 0x1a, + 0x95, + 0xb0, + 0xce, + 0x37, + 0xc0, + 0x9c, + 0x6e +]; const SUBSCRIPTION_DETAILS = { - 'userVisibleOnly': true, - 'applicationServerKey': new Uint8Array(FCM_APPLICATION_SERVER_KEY) + userVisibleOnly: true, + applicationServerKey: new Uint8Array(FCM_APPLICATION_SERVER_KEY) }; export default { ENDPOINT: 'https://fcm.googleapis.com', APPLICATION_SERVER_KEY: FCM_APPLICATION_SERVER_KEY, SUBSCRIPTION_OPTIONS: SUBSCRIPTION_DETAILS -}; \ No newline at end of file +}; diff --git a/src/messaging/models/token-details-model.ts b/src/messaging/models/token-details-model.ts index d984a595690..ddb25c40b81 100644 --- a/src/messaging/models/token-details-model.ts +++ b/src/messaging/models/token-details-model.ts @@ -38,7 +38,6 @@ ValidateInput.prototype.fcmSenderId; ValidateInput.prototype.fcmPushSet; export 
default class TokenDetailsModel extends DBInterface { - constructor() { super(TokenDetailsModel.dbName, DB_VERSION); } @@ -77,44 +76,54 @@ export default class TokenDetailsModel extends DBInterface { if (input.fcmToken) { if (typeof input.fcmToken !== 'string' || input.fcmToken.length === 0) { return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_TOKEN)); + this.errorFactory_.create(Errors.codes.BAD_TOKEN) + ); } } if (input.swScope) { if (typeof input.swScope !== 'string' || input.swScope.length === 0) { return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_SCOPE)); + this.errorFactory_.create(Errors.codes.BAD_SCOPE) + ); } } if (input.vapidKey) { if (typeof input.vapidKey !== 'string' || input.vapidKey.length === 0) { return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_VAPID_KEY)); - } + this.errorFactory_.create(Errors.codes.BAD_VAPID_KEY) + ); + } } if (input.subscription) { if (!(input.subscription instanceof PushSubscription)) { return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_SUBSCRIPTION)); + this.errorFactory_.create(Errors.codes.BAD_SUBSCRIPTION) + ); } } if (input.fcmSenderId) { - if (typeof input.fcmSenderId !== 'string' || - input.fcmSenderId.length === 0) { + if ( + typeof input.fcmSenderId !== 'string' || + input.fcmSenderId.length === 0 + ) { return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_SENDER_ID)); + this.errorFactory_.create(Errors.codes.BAD_SENDER_ID) + ); } } if (input.fcmPushSet) { - if (typeof input.fcmPushSet !== 'string' || - input.fcmPushSet.length === 0) { + if ( + typeof input.fcmPushSet !== 'string' || + input.fcmPushSet.length === 0 + ) { return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_PUSH_SET)); + this.errorFactory_.create(Errors.codes.BAD_PUSH_SET) + ); } } @@ -128,11 +137,10 @@ export default class TokenDetailsModel extends DBInterface { */ getTokenDetailsFromToken(fcmToken) { if (!fcmToken) { - return Promise.reject( - 
this.errorFactory_.create(Errors.codes.BAD_TOKEN)); + return Promise.reject(this.errorFactory_.create(Errors.codes.BAD_TOKEN)); } - return this.validateInputs_({fcmToken}) + return this.validateInputs_({ fcmToken }) .then(() => { return this.openDatabase(); }) @@ -143,10 +151,12 @@ export default class TokenDetailsModel extends DBInterface { const index = objectStore.index('fcmToken'); const request = index.get(fcmToken); request.onerror = function(event) { - reject(( event.target).error); + reject((event.target).error); }; request.onsuccess = function(event) { - const result = ( event.target).result ? ( event.target).result : null; + const result = (event.target).result + ? (event.target).result + : null; resolve(result); }; }); @@ -162,11 +172,10 @@ export default class TokenDetailsModel extends DBInterface { */ getTokenDetailsFromSWScope(swScope) { if (!swScope) { - return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_SCOPE)); + return Promise.reject(this.errorFactory_.create(Errors.codes.BAD_SCOPE)); } - return this.validateInputs_({swScope}) + return this.validateInputs_({ swScope }) .then(() => { return this.openDatabase(); }) @@ -176,11 +185,13 @@ export default class TokenDetailsModel extends DBInterface { const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); const scopeRequest = objectStore.get(swScope); scopeRequest.onerror = event => { - reject(( event.target).error); + reject((event.target).error); }; scopeRequest.onsuccess = event => { - const result = ( event.target).result ? ( event.target).result : null; + const result = (event.target).result + ? (event.target).result + : null; resolve(result); }; }); @@ -194,37 +205,45 @@ export default class TokenDetailsModel extends DBInterface { * fcmPushSet: !string}} input A plain js object containing args to save. 
* @return {Promise} */ - saveTokenDetails({swScope, vapidKey, subscription, - fcmSenderId, fcmToken, fcmPushSet}) { - if (!swScope) { - return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_SCOPE)); - } + saveTokenDetails({ + swScope, + vapidKey, + subscription, + fcmSenderId, + fcmToken, + fcmPushSet + }) { + if (!swScope) { + return Promise.reject(this.errorFactory_.create(Errors.codes.BAD_SCOPE)); + } - if (!vapidKey) { - return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_VAPID_KEY)); - } + if (!vapidKey) { + return Promise.reject( + this.errorFactory_.create(Errors.codes.BAD_VAPID_KEY) + ); + } - if (!subscription) { - return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_SUBSCRIPTION)); - } + if (!subscription) { + return Promise.reject( + this.errorFactory_.create(Errors.codes.BAD_SUBSCRIPTION) + ); + } - if (!fcmSenderId) { - return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_SENDER_ID)); - } + if (!fcmSenderId) { + return Promise.reject( + this.errorFactory_.create(Errors.codes.BAD_SENDER_ID) + ); + } - if (!fcmToken) { - return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_TOKEN)); - } + if (!fcmToken) { + return Promise.reject(this.errorFactory_.create(Errors.codes.BAD_TOKEN)); + } - if (!fcmPushSet) { - return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_PUSH_SET)); - } + if (!fcmPushSet) { + return Promise.reject( + this.errorFactory_.create(Errors.codes.BAD_PUSH_SET) + ); + } return this.validateInputs_({ swScope, @@ -234,37 +253,39 @@ export default class TokenDetailsModel extends DBInterface { fcmToken, fcmPushSet }) - .then(() => { - return this.openDatabase(); - }) - .then(db => { - /** + .then(() => { + return this.openDatabase(); + }) + .then(db => { + /** * @dict */ - const details = { - 'swScope': swScope, - 'vapidKey': vapidKey, - 'endpoint': subscription.endpoint, - 'auth': arrayBufferToBase64(subscription['getKey']('auth')), - 'p256dh': 
arrayBufferToBase64(subscription['getKey']('p256dh')), - 'fcmSenderId': fcmSenderId, - 'fcmToken': fcmToken, - 'fcmPushSet': fcmPushSet - }; - - return new Promise((resolve, reject) => { - const transaction = db.transaction( - [FCM_TOKEN_OBJ_STORE], this.TRANSACTION_READ_WRITE); - const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); - const request = objectStore.put(details); - request.onerror = event => { - reject(( event.target).error); - }; - request.onsuccess = event => { - resolve(); + const details = { + swScope: swScope, + vapidKey: vapidKey, + endpoint: subscription.endpoint, + auth: arrayBufferToBase64(subscription['getKey']('auth')), + p256dh: arrayBufferToBase64(subscription['getKey']('p256dh')), + fcmSenderId: fcmSenderId, + fcmToken: fcmToken, + fcmPushSet: fcmPushSet }; + + return new Promise((resolve, reject) => { + const transaction = db.transaction( + [FCM_TOKEN_OBJ_STORE], + this.TRANSACTION_READ_WRITE + ); + const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); + const request = objectStore.put(details); + request.onerror = event => { + reject((event.target).error); + }; + request.onsuccess = event => { + resolve(); + }; + }); }); - }); } /** @@ -278,28 +299,31 @@ export default class TokenDetailsModel extends DBInterface { deleteToken(token) { if (typeof token !== 'string' || token.length === 0) { return Promise.reject( - this.errorFactory_.create(Errors.codes.INVALID_DELETE_TOKEN)); + this.errorFactory_.create(Errors.codes.INVALID_DELETE_TOKEN) + ); } - return this.getTokenDetailsFromToken(token) - .then(details => { + return this.getTokenDetailsFromToken(token).then(details => { if (!details) { throw this.errorFactory_.create(Errors.codes.DELETE_TOKEN_NOT_FOUND); } - return this.openDatabase() - .then(db => { + return this.openDatabase().then(db => { return new Promise((resolve, reject) => { const transaction = db.transaction( - [FCM_TOKEN_OBJ_STORE], this.TRANSACTION_READ_WRITE); + [FCM_TOKEN_OBJ_STORE], + 
this.TRANSACTION_READ_WRITE + ); const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); const request = objectStore.delete(details['swScope']); request.onerror = event => { - reject(( event.target).error); + reject((event.target).error); }; request.onsuccess = event => { - if (( event.target).result === 0) { - reject(this.errorFactory_.create(Errors.codes.FAILED_TO_DELETE_TOKEN)); + if ((event.target).result === 0) { + reject( + this.errorFactory_.create(Errors.codes.FAILED_TO_DELETE_TOKEN) + ); return; } diff --git a/src/messaging/models/token-manager.ts b/src/messaging/models/token-manager.ts index fe7cea09ca2..27991be2668 100644 --- a/src/messaging/models/token-manager.ts +++ b/src/messaging/models/token-manager.ts @@ -15,7 +15,7 @@ */ 'use strict'; -import {ErrorFactory} from '../../app/errors'; +import { ErrorFactory } from '../../app/errors'; import Errors from './errors'; import arrayBufferToBase64 from '../helpers/array-buffer-to-base64'; @@ -26,7 +26,6 @@ const FCM_TOKEN_OBJ_STORE = 'fcm_token_object_Store'; const FCM_TOKEN_DETAILS_DB_VERSION = 1; export default class TokenManager { - private errorFactory_: ErrorFactory; private openDbPromise_: Promise; @@ -46,16 +45,18 @@ export default class TokenManager { } this.openDbPromise_ = new Promise((resolve, reject) => { - const request = indexedDB.open(FCM_TOKEN_DETAILS_DB, - FCM_TOKEN_DETAILS_DB_VERSION); + const request = indexedDB.open( + FCM_TOKEN_DETAILS_DB, + FCM_TOKEN_DETAILS_DB_VERSION + ); request.onerror = event => { - reject(( event.target).error); + reject((event.target).error); }; request.onsuccess = event => { - resolve(( event.target).result); + resolve((event.target).result); }; request.onupgradeneeded = event => { - var db = ( event.target).result; + var db = (event.target).result; var objectStore = db.createObjectStore(FCM_TOKEN_OBJ_STORE, { keyPath: 'swScope' @@ -97,44 +98,41 @@ export default class TokenManager { * @return {Promise} The details associated with that token. 
*/ getTokenDetailsFromToken(fcmToken) { - return this.openDatabase_() - .then(db => { + return this.openDatabase_().then(db => { return new Promise((resolve, reject) => { const transaction = db.transaction([FCM_TOKEN_OBJ_STORE]); const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); const index = objectStore.index('fcmToken'); const request = index.get(fcmToken); request.onerror = function(event) { - reject(( event.target).error); + reject((event.target).error); }; request.onsuccess = function(event) { - resolve(( event.target).result); + resolve((event.target).result); }; }); }); } getTokenDetailsFromSWScope_(swScope) { - return this.openDatabase_() - .then(db => { + return this.openDatabase_().then(db => { return new Promise((resolve, reject) => { const transaction = db.transaction([FCM_TOKEN_OBJ_STORE]); const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); const scopeRequest = objectStore.get(swScope); scopeRequest.onerror = event => { - reject(( event.target).error); + reject((event.target).error); }; scopeRequest.onsuccess = event => { - resolve(( event.target).result); + resolve((event.target).result); }; }); }); } getAllTokenDetailsForSenderId_(senderId): Promise> { - return this.openDatabase_() - .then(db => { + return this.openDatabase_().then(db => { return new Promise>((resolve, reject) => { const transaction = db.transaction([FCM_TOKEN_OBJ_STORE]); const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); @@ -143,11 +141,11 @@ export default class TokenManager { const cursorRequest = objectStore.openCursor(); cursorRequest.onerror = event => { - reject(( event.target).error); + reject((event.target).error); }; cursorRequest.onsuccess = event => { - const cursor = ( event.target).result; + const cursor = (event.target).result; if (cursor) { if (cursor.value['fcmSenderId'] === senderId) { senderIdTokens.push(cursor.value); @@ -175,7 +173,8 @@ export default class TokenManager { const p256dh = 
arrayBufferToBase64(subscription['getKey']('p256dh')); const auth = arrayBufferToBase64(subscription['getKey']('auth')); - let fcmSubscribeBody = `authorized_entity=${senderId}&` + + let fcmSubscribeBody = + `authorized_entity=${senderId}&` + `endpoint=${subscription.endpoint}&` + `encryption_key=${p256dh}&` + `encryption_auth=${auth}`; @@ -193,30 +192,37 @@ export default class TokenManager { body: fcmSubscribeBody }; - return fetch(FCMDetails.ENDPOINT + '/fcm/connect/subscribe', - subscribeOptions) - .then(response => response.json()) - .then(response => { - const fcmTokenResponse = response; - if (fcmTokenResponse['error']) { - const message = fcmTokenResponse['error']['message']; - throw this.errorFactory_.create(Errors.codes.TOKEN_SUBSCRIBE_FAILED, - {'message': message}); - } + return fetch( + FCMDetails.ENDPOINT + '/fcm/connect/subscribe', + subscribeOptions + ) + .then(response => response.json()) + .then(response => { + const fcmTokenResponse = response; + if (fcmTokenResponse['error']) { + const message = fcmTokenResponse['error']['message']; + throw this.errorFactory_.create(Errors.codes.TOKEN_SUBSCRIBE_FAILED, { + message: message + }); + } - if (!fcmTokenResponse['token']) { - throw this.errorFactory_.create(Errors.codes.TOKEN_SUBSCRIBE_NO_TOKEN); - } + if (!fcmTokenResponse['token']) { + throw this.errorFactory_.create( + Errors.codes.TOKEN_SUBSCRIBE_NO_TOKEN + ); + } - if (!fcmTokenResponse['pushSet']) { - throw this.errorFactory_.create(Errors.codes.TOKEN_SUBSCRIBE_NO_PUSH_SET); - } + if (!fcmTokenResponse['pushSet']) { + throw this.errorFactory_.create( + Errors.codes.TOKEN_SUBSCRIBE_NO_PUSH_SET + ); + } - return { - 'token': fcmTokenResponse['token'], - 'pushSet': fcmTokenResponse['pushSet'] - }; - }); + return { + token: fcmTokenResponse['token'], + pushSet: fcmTokenResponse['pushSet'] + }; + }); } /** @@ -233,11 +239,13 @@ export default class TokenManager { isSameSubscription_(subscription, masterTokenDetails) { // getKey() isn't defined in the 
PushSubscription externs file, hence // subscription['getKey'](''). - return (subscription.endpoint === masterTokenDetails['endpoint'] && + return ( + subscription.endpoint === masterTokenDetails['endpoint'] && arrayBufferToBase64(subscription['getKey']('auth')) === masterTokenDetails['auth'] && arrayBufferToBase64(subscription['getKey']('p256dh')) === - masterTokenDetails['p256dh']); + masterTokenDetails['p256dh'] + ); } /** @@ -253,26 +261,30 @@ export default class TokenManager { * @param {string} fcmPushSet The FCM push tied to the fcm token. * @return {Promise} */ - saveTokenDetails_(senderId, swRegistration, - subscription, fcmToken, fcmPushSet) { + saveTokenDetails_( + senderId, + swRegistration, + subscription, + fcmToken, + fcmPushSet + ) { const details = { - 'swScope': swRegistration.scope, - 'endpoint': subscription.endpoint, - 'auth': arrayBufferToBase64(subscription['getKey']('auth')), - 'p256dh': arrayBufferToBase64(subscription['getKey']('p256dh')), - 'fcmToken': fcmToken, - 'fcmPushSet': fcmPushSet, - 'fcmSenderId': senderId + swScope: swRegistration.scope, + endpoint: subscription.endpoint, + auth: arrayBufferToBase64(subscription['getKey']('auth')), + p256dh: arrayBufferToBase64(subscription['getKey']('p256dh')), + fcmToken: fcmToken, + fcmPushSet: fcmPushSet, + fcmSenderId: senderId }; - return this.openDatabase_() - .then(db => { + return this.openDatabase_().then(db => { return new Promise((resolve, reject) => { const transaction = db.transaction([FCM_TOKEN_OBJ_STORE], 'readwrite'); const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); const request = objectStore.put(details); request.onerror = event => { - reject(( event.target).error); + reject((event.target).error); }; request.onsuccess = event => { resolve(); @@ -294,48 +306,57 @@ export default class TokenManager { */ getSavedToken(senderId, swRegistration) { if (!(swRegistration instanceof ServiceWorkerRegistration)) { - return Promise.reject(this.errorFactory_.create( - 
Errors.codes.SW_REGISTRATION_EXPECTED)); + return Promise.reject( + this.errorFactory_.create(Errors.codes.SW_REGISTRATION_EXPECTED) + ); } if (typeof senderId !== 'string' || senderId.length === 0) { - return Promise.reject(this.errorFactory_.create( - Errors.codes.BAD_SENDER_ID)); + return Promise.reject( + this.errorFactory_.create(Errors.codes.BAD_SENDER_ID) + ); } return this.getAllTokenDetailsForSenderId_(senderId) - .then(allTokenDetails => { - if (allTokenDetails.length === 0) { - return; - } - - const index = allTokenDetails.findIndex(tokenDetails => { - return (swRegistration.scope === tokenDetails['swScope'] && - senderId === tokenDetails['fcmSenderId']); - }); + .then(allTokenDetails => { + if (allTokenDetails.length === 0) { + return; + } - if (index === -1) { - return; - } + const index = allTokenDetails.findIndex(tokenDetails => { + return ( + swRegistration.scope === tokenDetails['swScope'] && + senderId === tokenDetails['fcmSenderId'] + ); + }); - return allTokenDetails[index]; - }) - .then(tokenDetails => { - if (!tokenDetails) { - return; - } + if (index === -1) { + return; + } - return swRegistration.pushManager.getSubscription() - .catch(err => { - throw this.errorFactory_.create(Errors.codes.GET_SUBSCRIPTION_FAILED); + return allTokenDetails[index]; }) - .then(subscription => { - if (subscription && - this.isSameSubscription_(subscription, tokenDetails)) { - return tokenDetails['fcmToken']; + .then(tokenDetails => { + if (!tokenDetails) { + return; } + + return swRegistration.pushManager + .getSubscription() + .catch(err => { + throw this.errorFactory_.create( + Errors.codes.GET_SUBSCRIPTION_FAILED + ); + }) + .then(subscription => { + if ( + subscription && + this.isSameSubscription_(subscription, tokenDetails) + ) { + return tokenDetails['fcmToken']; + } + }); }); - }); } /** @@ -343,37 +364,46 @@ export default class TokenManager { */ createToken(senderId, swRegistration): Promise { if (typeof senderId !== 'string' || senderId.length === 0) 
{ - return Promise.reject(this.errorFactory_.create( - Errors.codes.BAD_SENDER_ID)); + return Promise.reject( + this.errorFactory_.create(Errors.codes.BAD_SENDER_ID) + ); } - if (!(swRegistration instanceof ServiceWorkerRegistration)) { - return Promise.reject(this.errorFactory_.create( - Errors.codes.SW_REGISTRATION_EXPECTED)); + if (!(swRegistration instanceof ServiceWorkerRegistration)) { + return Promise.reject( + this.errorFactory_.create(Errors.codes.SW_REGISTRATION_EXPECTED) + ); } // Check for existing subscription first let subscription; let fcmTokenDetails; - return swRegistration.pushManager.getSubscription() - .then(subscription => { - if (subscription) { - return subscription; - } + return swRegistration.pushManager + .getSubscription() + .then(subscription => { + if (subscription) { + return subscription; + } - return swRegistration.pushManager.subscribe( - FCMDetails.SUBSCRIPTION_OPTIONS); - }) - .then(sub => { - subscription = sub; - return this.subscribeToFCM(senderId, subscription) - }) - .then(tokenDetails => { - fcmTokenDetails = tokenDetails; - return this.saveTokenDetails_(senderId, swRegistration, subscription, - fcmTokenDetails['token'], fcmTokenDetails['pushSet']); - }) - .then(() => fcmTokenDetails['token']); + return swRegistration.pushManager.subscribe( + FCMDetails.SUBSCRIPTION_OPTIONS + ); + }) + .then(sub => { + subscription = sub; + return this.subscribeToFCM(senderId, subscription); + }) + .then(tokenDetails => { + fcmTokenDetails = tokenDetails; + return this.saveTokenDetails_( + senderId, + swRegistration, + subscription, + fcmTokenDetails['token'], + fcmTokenDetails['pushSet'] + ); + }) + .then(() => fcmTokenDetails['token']); } /** @@ -387,28 +417,31 @@ export default class TokenManager { deleteToken(token) { if (typeof token !== 'string' || token.length === 0) { return Promise.reject( - this.errorFactory_.create(Errors.codes.INVALID_DELETE_TOKEN)); + this.errorFactory_.create(Errors.codes.INVALID_DELETE_TOKEN) + ); } - return 
this.getTokenDetailsFromToken(token) - .then(details => { + return this.getTokenDetailsFromToken(token).then(details => { if (!details) { throw this.errorFactory_.create(Errors.codes.DELETE_TOKEN_NOT_FOUND); } - return this.openDatabase_() - .then(db => { + return this.openDatabase_().then(db => { return new Promise((resolve, reject) => { - const transaction = db.transaction([FCM_TOKEN_OBJ_STORE], - 'readwrite'); + const transaction = db.transaction( + [FCM_TOKEN_OBJ_STORE], + 'readwrite' + ); const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); const request = objectStore.delete(details['swScope']); request.onerror = event => { - reject(( event.target).error); + reject((event.target).error); }; request.onsuccess = event => { - if (( event.target).result === 0) { - reject(this.errorFactory_.create(Errors.codes.FAILED_TO_DELETE_TOKEN)); + if ((event.target).result === 0) { + reject( + this.errorFactory_.create(Errors.codes.FAILED_TO_DELETE_TOKEN) + ); return; } diff --git a/src/messaging/models/vapid-details-model.ts b/src/messaging/models/vapid-details-model.ts index 232bbb747c2..ca5b5286c25 100644 --- a/src/messaging/models/vapid-details-model.ts +++ b/src/messaging/models/vapid-details-model.ts @@ -22,7 +22,6 @@ const FCM_VAPID_OBJ_STORE = 'fcm_vapid_object_Store'; const DB_VERSION = 1; export default class VapidDetailsModel extends DBInterface { - constructor() { super(VapidDetailsModel.dbName, DB_VERSION); } @@ -49,30 +48,28 @@ export default class VapidDetailsModel extends DBInterface { */ getVapidFromSWScope(swScope) { if (typeof swScope !== 'string' || swScope.length === 0) { - return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_SCOPE)); + return Promise.reject(this.errorFactory_.create(Errors.codes.BAD_SCOPE)); } - return this.openDatabase() - .then(db => { - return new Promise((resolve, reject) => { - const transaction = db.transaction([FCM_VAPID_OBJ_STORE]); - const objectStore = transaction.objectStore(FCM_VAPID_OBJ_STORE); - 
const scopeRequest = objectStore.get(swScope); - scopeRequest.onerror = event => { - reject(( event.target).error); - }; - - scopeRequest.onsuccess = event => { - let result = ( event.target).result; - let vapidKey = null; - if (result) { - vapidKey = result.vapidKey; - } - resolve(vapidKey); - }; - }); + return this.openDatabase().then(db => { + return new Promise((resolve, reject) => { + const transaction = db.transaction([FCM_VAPID_OBJ_STORE]); + const objectStore = transaction.objectStore(FCM_VAPID_OBJ_STORE); + const scopeRequest = objectStore.get(swScope); + scopeRequest.onerror = event => { + reject((event.target).error); + }; + + scopeRequest.onsuccess = event => { + let result = (event.target).result; + let vapidKey = null; + if (result) { + vapidKey = result.vapidKey; + } + resolve(vapidKey); + }; }); + }); } /** @@ -85,35 +82,36 @@ export default class VapidDetailsModel extends DBInterface { */ saveVapidDetails(swScope, vapidKey) { if (typeof swScope !== 'string' || swScope.length === 0) { - return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_SCOPE)); + return Promise.reject(this.errorFactory_.create(Errors.codes.BAD_SCOPE)); } if (typeof vapidKey !== 'string' || vapidKey.length === 0) { return Promise.reject( - this.errorFactory_.create(Errors.codes.BAD_VAPID_KEY)); + this.errorFactory_.create(Errors.codes.BAD_VAPID_KEY) + ); } const details = { - 'swScope': swScope, - 'vapidKey': vapidKey + swScope: swScope, + vapidKey: vapidKey }; - return this.openDatabase() - .then(db => { - return new Promise((resolve, reject) => { - const transaction = db.transaction( - [FCM_VAPID_OBJ_STORE], this.TRANSACTION_READ_WRITE); - const objectStore = transaction.objectStore(FCM_VAPID_OBJ_STORE); - const request = objectStore.put(details); - request.onerror = event => { - reject(( event.target).error); - }; - request.onsuccess = event => { - resolve(); - }; - }); + return this.openDatabase().then(db => { + return new Promise((resolve, reject) => { + const 
transaction = db.transaction( + [FCM_VAPID_OBJ_STORE], + this.TRANSACTION_READ_WRITE + ); + const objectStore = transaction.objectStore(FCM_VAPID_OBJ_STORE); + const request = objectStore.put(details); + request.onerror = event => { + reject((event.target).error); + }; + request.onsuccess = event => { + resolve(); + }; }); + }); } /** @@ -123,34 +121,34 @@ export default class VapidDetailsModel extends DBInterface { * deleted and returns the deleted vapid key. */ deleteVapidDetails(swScope) { - return this.getVapidFromSWScope(swScope) - .then(vapidKey => { - if (!vapidKey) { - throw this.errorFactory_.create(Errors.codes.DELETE_SCOPE_NOT_FOUND); - } + return this.getVapidFromSWScope(swScope).then(vapidKey => { + if (!vapidKey) { + throw this.errorFactory_.create(Errors.codes.DELETE_SCOPE_NOT_FOUND); + } - return this.openDatabase() - .then(db => { - return new Promise((resolve, reject) => { - const transaction = db.transaction( - [FCM_VAPID_OBJ_STORE], this.TRANSACTION_READ_WRITE); - const objectStore = transaction.objectStore(FCM_VAPID_OBJ_STORE); - const request = objectStore.delete(swScope); - request.onerror = event => { - reject(( event.target).error); - }; - request.onsuccess = event => { - if (( event.target).result === 0) { - reject( - this.errorFactory_.create(Errors.codes.FAILED_DELETE_VAPID_KEY) - ); - return; - } + return this.openDatabase().then(db => { + return new Promise((resolve, reject) => { + const transaction = db.transaction( + [FCM_VAPID_OBJ_STORE], + this.TRANSACTION_READ_WRITE + ); + const objectStore = transaction.objectStore(FCM_VAPID_OBJ_STORE); + const request = objectStore.delete(swScope); + request.onerror = event => { + reject((event.target).error); + }; + request.onsuccess = event => { + if ((event.target).result === 0) { + reject( + this.errorFactory_.create(Errors.codes.FAILED_DELETE_VAPID_KEY) + ); + return; + } - resolve(vapidKey); - }; - }); - }); + resolve(vapidKey); + }; + }); }); + }); } } diff --git 
a/src/messaging/models/worker-page-message.ts b/src/messaging/models/worker-page-message.ts index 6199e271ff3..c23d67abb9b 100644 --- a/src/messaging/models/worker-page-message.ts +++ b/src/messaging/models/worker-page-message.ts @@ -41,4 +41,4 @@ export default { PARAMS, TYPES_OF_MSG: msgType, createNewMsg -}; \ No newline at end of file +}; diff --git a/src/storage.ts b/src/storage.ts index 3a072fbc302..6791ae329c8 100644 --- a/src/storage.ts +++ b/src/storage.ts @@ -13,17 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import {StringFormat} from './storage/implementation/string'; -import {TaskEvent} from './storage/implementation/taskenums'; -import {TaskState} from './storage/implementation/taskenums'; -import {XhrIoPool} from './storage/implementation/xhriopool'; -import {Reference} from './storage/reference'; -import {Service} from './storage/service'; +import { StringFormat } from './storage/implementation/string'; +import { TaskEvent } from './storage/implementation/taskenums'; +import { TaskState } from './storage/implementation/taskenums'; +import { XhrIoPool } from './storage/implementation/xhriopool'; +import { Reference } from './storage/reference'; +import { Service } from './storage/service'; import firebase from './app'; -import { - FirebaseApp, - FirebaseServiceFactory -} from "./app/firebase_app"; +import { FirebaseApp, FirebaseServiceFactory } from './app/firebase_app'; /** * Type constant for Firebase Storage. 
*/ @@ -36,19 +33,20 @@ function factory(app: FirebaseApp, unused: any, opt_url?: string): Service { export function registerStorage(instance) { let namespaceExports = { // no-inline - 'TaskState': TaskState, - 'TaskEvent': TaskEvent, - 'StringFormat': StringFormat, - 'Storage': Service, - 'Reference': Reference + TaskState: TaskState, + TaskEvent: TaskEvent, + StringFormat: StringFormat, + Storage: Service, + Reference: Reference }; instance.INTERNAL.registerService( - STORAGE_TYPE, - (factory as FirebaseServiceFactory), - namespaceExports, - undefined, - // Allow multiple storage instances per app. - true); + STORAGE_TYPE, + factory as FirebaseServiceFactory, + namespaceExports, + undefined, + // Allow multiple storage instances per app. + true + ); } registerStorage(firebase); diff --git a/src/storage/implementation/args.ts b/src/storage/implementation/args.ts index 4b2bfaf3c10..eb972cdd27a 100644 --- a/src/storage/implementation/args.ts +++ b/src/storage/implementation/args.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ import * as errorsExports from './error'; -import {errors} from './error'; +import { errors } from './error'; import * as MetadataUtils from './metadata'; import * as type from './type'; @@ -36,7 +36,11 @@ export function validate(name: string, specs: ArgSpec[], passed: IArguments) { let validLength = minArgs <= passed.length && passed.length <= maxArgs; if (!validLength) { throw errorsExports.invalidArgumentCount( - minArgs, maxArgs, name, passed.length); + minArgs, + maxArgs, + name, + passed.length + ); } for (let i = 0; i < passed.length; i++) { try { @@ -58,9 +62,7 @@ export class ArgSpec { validator: (p1: any) => void; optional: boolean; - constructor( - validator: (p1: any) => void, - opt_optional?: boolean) { + constructor(validator: (p1: any) => void, opt_optional?: boolean) { let self = this; this.validator = function(p: any) { if (self.optional && !type.isJustDef(p)) { @@ -72,9 +74,7 @@ export class ArgSpec { } } -export function and_( - v1: (p1: any) => void, - v2: Function): (p1: any) => void { +export function and_(v1: (p1: any) => void, v2: Function): (p1: any) => void { return function(p) { v1(p); v2(p); @@ -82,8 +82,9 @@ export function and_( } export function stringSpec( - opt_validator?: (p1: any) => void | null, - opt_optional?: boolean): ArgSpec { + opt_validator?: (p1: any) => void | null, + opt_optional?: boolean +): ArgSpec { function stringValidator(p: any) { if (!type.isString(p)) { throw 'Expected string.'; @@ -100,8 +101,10 @@ export function stringSpec( export function uploadDataSpec(): ArgSpec { function validator(p: any) { - let valid = p instanceof Uint8Array || p instanceof ArrayBuffer || - type.isNativeBlobDefined() && p instanceof Blob; + let valid = + p instanceof Uint8Array || + p instanceof ArrayBuffer || + (type.isNativeBlobDefined() && p instanceof Blob); if (!valid) { throw 'Expected Blob or File.'; } @@ -124,10 +127,11 @@ export function nonNegativeNumberSpec(): ArgSpec { } export function looseObjectSpec( - 
opt_validator?: ((p1: any) => void) | null, - opt_optional?: boolean): ArgSpec { + opt_validator?: ((p1: any) => void) | null, + opt_optional?: boolean +): ArgSpec { function validator(p: any) { - let isLooseObject = (p === null) || (type.isDef(p) && p instanceof Object); + let isLooseObject = p === null || (type.isDef(p) && p instanceof Object); if (!isLooseObject) { throw 'Expected an Object.'; } diff --git a/src/storage/implementation/authwrapper.ts b/src/storage/implementation/authwrapper.ts index ea056c92509..3bd4b01a49f 100644 --- a/src/storage/implementation/authwrapper.ts +++ b/src/storage/implementation/authwrapper.ts @@ -13,21 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import {Reference} from '../reference'; -import {Service} from '../service'; +import { Reference } from '../reference'; +import { Service } from '../service'; import * as constants from './constants'; import * as errorsExports from './error'; -import {errors} from './error'; -import {FailRequest} from './failrequest'; -import {Location} from './location'; +import { errors } from './error'; +import { FailRequest } from './failrequest'; +import { Location } from './location'; import * as promiseimpl from './promise_external'; -import {Request} from './request'; -import {RequestInfo} from './requestinfo'; -import {requestMaker} from './requestmaker'; -import {RequestMap} from './requestmap'; +import { Request } from './request'; +import { RequestInfo } from './requestinfo'; +import { requestMaker } from './requestmaker'; +import { RequestMap } from './requestmap'; import * as type from './type'; -import {XhrIoPool} from './xhriopool'; -import { FirebaseApp, FirebaseAuthTokenData } from "../../app/firebase_app"; +import { XhrIoPool } from './xhriopool'; +import { FirebaseApp, FirebaseAuthTokenData } from '../../app/firebase_app'; /** * @param app If null, getAuthToken always resolves with null. 
@@ -36,14 +36,13 @@ import { FirebaseApp, FirebaseAuthTokenData } from "../../app/firebase_app"; * @struct */ export class AuthWrapper { - private app_: FirebaseApp|null; - private bucket_: string|null = null; + private app_: FirebaseApp | null; + private bucket_: string | null = null; /** maker */ - private storageRefMaker_: - (p1: AuthWrapper, p2: Location) => Reference; + private storageRefMaker_: (p1: AuthWrapper, p2: Location) => Reference; private requestMaker_: requestMaker; private pool_: XhrIoPool; private service_: Service; @@ -53,10 +52,12 @@ export class AuthWrapper { private deleted_: boolean = false; constructor( - app: FirebaseApp|null, - maker: (p1: AuthWrapper, p2: Location) => Reference, - requestMaker: requestMaker, service: Service, - pool: XhrIoPool) { + app: FirebaseApp | null, + maker: (p1: AuthWrapper, p2: Location) => Reference, + requestMaker: requestMaker, + service: Service, + pool: XhrIoPool + ) { this.app_ = app; if (this.app_ !== null) { let options = this.app_.options; @@ -73,7 +74,9 @@ export class AuthWrapper { this.requestMap_ = new RequestMap(); } - private static extractBucket_(config: {[prop: string]: any}): string|null { + private static extractBucket_(config: { + [prop: string]: any; + }): string | null { let bucketString = config[constants.configOption] || null; if (bucketString == null) { return null; @@ -82,28 +85,32 @@ export class AuthWrapper { return loc.bucket; } - getAuthToken(): Promise { + getAuthToken(): Promise { // TODO(andysoto): remove ifDef checks after firebase-app implements stubs // (b/28673818). 
- if (this.app_ !== null && type.isDef(this.app_.INTERNAL) && - type.isDef(this.app_.INTERNAL.getToken)) { + if ( + this.app_ !== null && + type.isDef(this.app_.INTERNAL) && + type.isDef(this.app_.INTERNAL.getToken) + ) { return this.app_.INTERNAL.getToken().then( - function(response: FirebaseAuthTokenData|null): string|null { - if (response !== null) { - return response.accessToken; - } else { - return null; - } - }, - function(_error) { + function(response: FirebaseAuthTokenData | null): string | null { + if (response !== null) { + return response.accessToken; + } else { return null; - }); + } + }, + function(_error) { + return null; + } + ); } else { - return (promiseimpl.resolve(null) as Promise); + return promiseimpl.resolve(null) as Promise; } } - bucket(): string|null { + bucket(): string | null { if (this.deleted_) { throw errorsExports.appDeleted(); } else { @@ -130,10 +137,12 @@ export class AuthWrapper { return this.storageRefMaker_(this, loc); } - makeRequest(requestInfo: RequestInfo, authToken: string|null): Request { + makeRequest( + requestInfo: RequestInfo, + authToken: string | null + ): Request { if (!this.deleted_) { - let request = this.requestMaker_( - requestInfo, authToken, this.pool_); + let request = this.requestMaker_(requestInfo, authToken, this.pool_); this.requestMap_.addRequest(request); return request; } else { diff --git a/src/storage/implementation/backoff.ts b/src/storage/implementation/backoff.ts index 772914ab727..fdc2b68f8a8 100644 --- a/src/storage/implementation/backoff.ts +++ b/src/storage/implementation/backoff.ts @@ -20,7 +20,7 @@ */ type id = (p1: boolean) => void; -export {id}; +export { id }; /** * @param f May be invoked @@ -29,9 +29,13 @@ export {id}; * passed to f, including the initial boolean. 
*/ export function start( - f: (p1: (success: boolean, ...rest: any[]) => void, - canceled: boolean) => void, - callback: Function, timeout: number): id { + f: ( + p1: (success: boolean, ...rest: any[]) => void, + canceled: boolean + ) => void, + callback: Function, + timeout: number +): id { // TODO(andysoto): make this code cleaner (probably refactor into an actual // type instead of a bunch of functions with state shared in the closure) let waitSeconds = 1; @@ -59,7 +63,7 @@ export function start( }, millis); } - function handler(success: boolean, ...var_args: any[]):void { + function handler(success: boolean, ...var_args: any[]): void { if (triggeredCallback) { return; } diff --git a/src/storage/implementation/blob.ts b/src/storage/implementation/blob.ts index 5d8fc581851..bee68e2eddd 100644 --- a/src/storage/implementation/blob.ts +++ b/src/storage/implementation/blob.ts @@ -21,7 +21,7 @@ */ import * as fs from './fs'; import * as string from './string'; -import {StringFormat} from './string'; +import { StringFormat } from './string'; import * as type from './type'; /** @@ -30,15 +30,15 @@ import * as type from './type'; * modified after this blob's construction. 
*/ export class FbsBlob { - private data_: Blob|Uint8Array; + private data_: Blob | Uint8Array; private size_: number; private type_: string; - constructor(data: Blob|Uint8Array|ArrayBuffer, opt_elideCopy?: boolean) { + constructor(data: Blob | Uint8Array | ArrayBuffer, opt_elideCopy?: boolean) { let size: number = 0; let blobType: string = ''; if (type.isNativeBlob(data)) { - this.data_ = (data as Blob); + this.data_ = data as Blob; size = (data as Blob).size; blobType = (data as Blob).type; } else if (data instanceof ArrayBuffer) { @@ -51,10 +51,10 @@ export class FbsBlob { size = this.data_.length; } else if (data instanceof Uint8Array) { if (opt_elideCopy) { - this.data_ = (data as Uint8Array); + this.data_ = data as Uint8Array; } else { this.data_ = new Uint8Array(data.length); - this.data_.set((data as Uint8Array)); + this.data_.set(data as Uint8Array); } size = data.length; } @@ -70,24 +70,29 @@ export class FbsBlob { return this.type_; } - slice(startByte: number, endByte: number): FbsBlob|null { + slice(startByte: number, endByte: number): FbsBlob | null { if (type.isNativeBlob(this.data_)) { - let realBlob = (this.data_ as Blob); + let realBlob = this.data_ as Blob; let sliced = fs.sliceBlob(realBlob, startByte, endByte); if (sliced === null) { return null; } return new FbsBlob(sliced); } else { - let slice = - new Uint8Array((this.data_ as Uint8Array).buffer, startByte, endByte - startByte); + let slice = new Uint8Array( + (this.data_ as Uint8Array).buffer, + startByte, + endByte - startByte + ); return new FbsBlob(slice, true); } } - static getBlob(...var_args: (string|FbsBlob)[]): FbsBlob|null { + static getBlob(...var_args: (string | FbsBlob)[]): FbsBlob | null { if (type.isNativeBlobDefined()) { - var blobby: (Blob|Uint8Array|string)[] = var_args.map(function(val: string|FbsBlob): Blob|Uint8Array|string { + var blobby: (Blob | Uint8Array | string)[] = var_args.map(function( + val: string | FbsBlob + ): Blob | Uint8Array | string { if (val instanceof 
FbsBlob) { return val.data_; } else { @@ -96,12 +101,14 @@ export class FbsBlob { }); return new FbsBlob(fs.getBlob.apply(null, blobby)); } else { - let uint8Arrays: Uint8Array[] = var_args.map(function(val: string|FbsBlob): Uint8Array { + let uint8Arrays: Uint8Array[] = var_args.map(function( + val: string | FbsBlob + ): Uint8Array { if (type.isString(val)) { return string.dataFromString(StringFormat.RAW, val as string).data; } else { // Blobs don't exist, so this has to be a Uint8Array. - return ((val as FbsBlob).data_ as Uint8Array); + return (val as FbsBlob).data_ as Uint8Array; } }); let finalLength = 0; @@ -119,7 +126,7 @@ export class FbsBlob { } } - uploadData(): Blob|Uint8Array { + uploadData(): Blob | Uint8Array { return this.data_; } } diff --git a/src/storage/implementation/blobbuilder.d.ts b/src/storage/implementation/blobbuilder.d.ts index 433003b4442..fba4d411398 100644 --- a/src/storage/implementation/blobbuilder.d.ts +++ b/src/storage/implementation/blobbuilder.d.ts @@ -14,14 +14,14 @@ * limitations under the License. */ declare class IBlobBuilder { - append(x: string|Blob|ArrayBuffer): void; + append(x: string | Blob | ArrayBuffer): void; getBlob(): Blob; } -declare const BlobBuilder: undefined|(typeof IBlobBuilder); -declare const WebKitBlobBuilder: undefined|(typeof IBlobBuilder); +declare const BlobBuilder: undefined | (typeof IBlobBuilder); +declare const WebKitBlobBuilder: undefined | (typeof IBlobBuilder); -declare interface Blob { +interface Blob { webkitSlice: typeof Blob.prototype.slice; mozSlice: typeof Blob.prototype.slice; } diff --git a/src/storage/implementation/error.ts b/src/storage/implementation/error.ts index e3a17709b79..c2cbb36c9c3 100644 --- a/src/storage/implementation/error.ts +++ b/src/storage/implementation/error.ts @@ -13,12 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {configOption} from './constants'; +import { configOption } from './constants'; export class FirebaseStorageError implements Error { private code_: string; private message_: string; - private serverResponse_: string|null; + private serverResponse_: string | null; private name_: string; constructor(code: Code, message: string) { @@ -36,11 +36,11 @@ export class FirebaseStorageError implements Error { return prependCode(code) === this.codeProp(); } - serverResponseProp(): string|null { + serverResponseProp(): string | null { return this.serverResponse_; } - setServerResponseProp(serverResponse: string|null) { + setServerResponseProp(serverResponse: string | null) { this.serverResponse_ = serverResponse; } @@ -56,7 +56,7 @@ export class FirebaseStorageError implements Error { return this.message_; } - get serverResponse(): null|string { + get serverResponse(): null | string { return this.serverResponse_; } } @@ -101,115 +101,156 @@ export function prependCode(code: Code): string { export function unknown(): FirebaseStorageError { let message = - 'An unknown error occurred, please check the error payload for ' + - 'server response.'; + 'An unknown error occurred, please check the error payload for ' + + 'server response.'; return new FirebaseStorageError(Code.UNKNOWN, message); } export function objectNotFound(path: string): FirebaseStorageError { return new FirebaseStorageError( - Code.OBJECT_NOT_FOUND, 'Object \'' + path + '\' does not exist.'); + Code.OBJECT_NOT_FOUND, + "Object '" + path + "' does not exist." + ); } export function bucketNotFound(bucket: string): FirebaseStorageError { return new FirebaseStorageError( - Code.BUCKET_NOT_FOUND, 'Bucket \'' + bucket + '\' does not exist.'); + Code.BUCKET_NOT_FOUND, + "Bucket '" + bucket + "' does not exist." 
+ ); } export function projectNotFound(project: string): FirebaseStorageError { return new FirebaseStorageError( - Code.PROJECT_NOT_FOUND, 'Project \'' + project + '\' does not exist.'); + Code.PROJECT_NOT_FOUND, + "Project '" + project + "' does not exist." + ); } export function quotaExceeded(bucket: string): FirebaseStorageError { return new FirebaseStorageError( - Code.QUOTA_EXCEEDED, - 'Quota for bucket \'' + bucket + '\' exceeded, please view quota on ' + - 'https://firebase.google.com/pricing/.'); + Code.QUOTA_EXCEEDED, + "Quota for bucket '" + + bucket + + "' exceeded, please view quota on " + + 'https://firebase.google.com/pricing/.' + ); } export function unauthenticated(): FirebaseStorageError { let message = - 'User is not authenticated, please authenticate using Firebase ' + - 'Authentication and try again.'; + 'User is not authenticated, please authenticate using Firebase ' + + 'Authentication and try again.'; return new FirebaseStorageError(Code.UNAUTHENTICATED, message); } export function unauthorized(path: string): FirebaseStorageError { return new FirebaseStorageError( - Code.UNAUTHORIZED, - 'User does not have permission to access \'' + path + '\'.'); + Code.UNAUTHORIZED, + "User does not have permission to access '" + path + "'." + ); } export function retryLimitExceeded(): FirebaseStorageError { return new FirebaseStorageError( - Code.RETRY_LIMIT_EXCEEDED, - 'Max retry time for operation exceeded, please try again.'); + Code.RETRY_LIMIT_EXCEEDED, + 'Max retry time for operation exceeded, please try again.' + ); } export function invalidChecksum( - path: string, checksum: string, calculated: string): FirebaseStorageError { + path: string, + checksum: string, + calculated: string +): FirebaseStorageError { return new FirebaseStorageError( - Code.INVALID_CHECKSUM, - 'Uploaded/downloaded object \'' + path + '\' has checksum \'' + checksum + - '\' which does not match \'' + calculated + - '\'. 
Please retry the upload/download.'); + Code.INVALID_CHECKSUM, + "Uploaded/downloaded object '" + + path + + "' has checksum '" + + checksum + + "' which does not match '" + + calculated + + "'. Please retry the upload/download." + ); } export function canceled(): FirebaseStorageError { - return new FirebaseStorageError(Code.CANCELED, 'User canceled the upload/download.'); + return new FirebaseStorageError( + Code.CANCELED, + 'User canceled the upload/download.' + ); } export function invalidEventName(name: string): FirebaseStorageError { return new FirebaseStorageError( - Code.INVALID_EVENT_NAME, 'Invalid event name \'' + name + '\'.'); + Code.INVALID_EVENT_NAME, + "Invalid event name '" + name + "'." + ); } export function invalidUrl(url: string): FirebaseStorageError { - return new FirebaseStorageError(Code.INVALID_URL, 'Invalid URL \'' + url + '\'.'); + return new FirebaseStorageError( + Code.INVALID_URL, + "Invalid URL '" + url + "'." + ); } export function invalidDefaultBucket(bucket: string): FirebaseStorageError { return new FirebaseStorageError( - Code.INVALID_DEFAULT_BUCKET, - 'Invalid default bucket \'' + bucket + '\'.'); + Code.INVALID_DEFAULT_BUCKET, + "Invalid default bucket '" + bucket + "'." + ); } export function noDefaultBucket(): FirebaseStorageError { return new FirebaseStorageError( - Code.NO_DEFAULT_BUCKET, - 'No default bucket ' + - 'found. Did you set the \'' + configOption + - '\' property when initializing the app?'); + Code.NO_DEFAULT_BUCKET, + 'No default bucket ' + + "found. Did you set the '" + + configOption + + "' property when initializing the app?" + ); } export function cannotSliceBlob(): FirebaseStorageError { return new FirebaseStorageError( - Code.CANNOT_SLICE_BLOB, - 'Cannot slice blob for upload. Please retry the upload.'); + Code.CANNOT_SLICE_BLOB, + 'Cannot slice blob for upload. Please retry the upload.' 
+ ); } export function serverFileWrongSize(): FirebaseStorageError { return new FirebaseStorageError( - Code.SERVER_FILE_WRONG_SIZE, - 'Server recorded incorrect upload file size, please retry the upload.'); + Code.SERVER_FILE_WRONG_SIZE, + 'Server recorded incorrect upload file size, please retry the upload.' + ); } export function noDownloadURL(): FirebaseStorageError { return new FirebaseStorageError( - Code.NO_DOWNLOAD_URL, 'The given file does not have any download URLs.'); + Code.NO_DOWNLOAD_URL, + 'The given file does not have any download URLs.' + ); } export function invalidArgument( - index: number, fnName: string, message: string): FirebaseStorageError { + index: number, + fnName: string, + message: string +): FirebaseStorageError { return new FirebaseStorageError( - Code.INVALID_ARGUMENT, - 'Invalid argument in `' + fnName + '` at index ' + index + ': ' + - message); + Code.INVALID_ARGUMENT, + 'Invalid argument in `' + fnName + '` at index ' + index + ': ' + message + ); } export function invalidArgumentCount( - argMin: number, argMax: number, fnName: string, real: number): FirebaseStorageError { + argMin: number, + argMax: number, + fnName: string, + real: number +): FirebaseStorageError { let countPart; let plural; if (argMin === argMax) { @@ -220,13 +261,24 @@ export function invalidArgumentCount( plural = 'arguments'; } return new FirebaseStorageError( - Code.INVALID_ARGUMENT_COUNT, - 'Invalid argument count in `' + fnName + '`: Expected ' + countPart + - ' ' + plural + ', received ' + real + '.'); + Code.INVALID_ARGUMENT_COUNT, + 'Invalid argument count in `' + + fnName + + '`: Expected ' + + countPart + + ' ' + + plural + + ', received ' + + real + + '.' + ); } export function appDeleted(): FirebaseStorageError { - return new FirebaseStorageError(Code.APP_DELETED, 'The Firebase app was deleted.'); + return new FirebaseStorageError( + Code.APP_DELETED, + 'The Firebase app was deleted.' 
+ ); } /** @@ -234,25 +286,34 @@ export function appDeleted(): FirebaseStorageError { */ export function invalidRootOperation(name: string): FirebaseStorageError { return new FirebaseStorageError( - Code.INVALID_ROOT_OPERATION, - 'The operation \'' + name + - '\' cannot be performed on a root reference, create a non-root ' + - 'reference using child, such as .child(\'file.png\').'); + Code.INVALID_ROOT_OPERATION, + "The operation '" + + name + + "' cannot be performed on a root reference, create a non-root " + + "reference using child, such as .child('file.png')." + ); } /** * @param format The format that was not valid. * @param message A message describing the format violation. */ -export function invalidFormat(format: string, message: string): FirebaseStorageError { +export function invalidFormat( + format: string, + message: string +): FirebaseStorageError { return new FirebaseStorageError( - Code.INVALID_FORMAT, - 'String does not match format \'' + format + '\': ' + message); + Code.INVALID_FORMAT, + "String does not match format '" + format + "': " + message + ); } /** * @param message A message describing the internal error. */ export function internalError(message: string): FirebaseStorageError { - throw new FirebaseStorageError(Code.INTERNAL_ERROR, 'Internal error: ' + message); + throw new FirebaseStorageError( + Code.INTERNAL_ERROR, + 'Internal error: ' + message + ); } diff --git a/src/storage/implementation/failrequest.ts b/src/storage/implementation/failrequest.ts index 6e61b017c60..cfaf7591b8a 100644 --- a/src/storage/implementation/failrequest.ts +++ b/src/storage/implementation/failrequest.ts @@ -13,10 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {FirebaseStorageError} from './error'; +import { FirebaseStorageError } from './error'; import * as promiseimpl from './promise_external'; import * as RequestExports from './request'; -import {Request} from './request'; +import { Request } from './request'; /** * A request whose promise always fails. diff --git a/src/storage/implementation/fs.ts b/src/storage/implementation/fs.ts index 3632d8f0d60..64ff0cb5084 100644 --- a/src/storage/implementation/fs.ts +++ b/src/storage/implementation/fs.ts @@ -25,7 +25,7 @@ declare var IBlobBuilder; declare var BlobBuilder; declare var WebKitBlobBuilder; -function getBlobBuilder(): (typeof IBlobBuilder)|undefined { +function getBlobBuilder(): (typeof IBlobBuilder) | undefined { if (typeof BlobBuilder !== 'undefined') { return BlobBuilder; } else if (typeof WebKitBlobBuilder !== 'undefined') { @@ -41,7 +41,7 @@ function getBlobBuilder(): (typeof IBlobBuilder)|undefined { * @param var_args The values that will make up the resulting blob. * @return The blob. */ -export function getBlob(...var_args: (string|Blob|ArrayBuffer)[]): Blob { +export function getBlob(...var_args: (string | Blob | ArrayBuffer)[]): Blob { let BlobBuilder = getBlobBuilder(); if (BlobBuilder !== undefined) { let bb = new BlobBuilder(); @@ -53,7 +53,7 @@ export function getBlob(...var_args: (string|Blob|ArrayBuffer)[]): Blob { if (type.isNativeBlobDefined()) { return new Blob(var_args); } else { - throw Error('This browser doesn\'t seem to support creating Blobs'); + throw Error("This browser doesn't seem to support creating Blobs"); } } } @@ -67,7 +67,7 @@ export function getBlob(...var_args: (string|Blob|ArrayBuffer)[]): Blob { * @param end Index of the ending byte. * @return The blob slice or null if not supported. 
*/ -export function sliceBlob(blob: Blob, start: number, end: number): Blob|null { +export function sliceBlob(blob: Blob, start: number, end: number): Blob | null { if ((blob as any).webkitSlice) { return (blob as any).webkitSlice(start, end); } else if ((blob as any).mozSlice) { diff --git a/src/storage/implementation/json.ts b/src/storage/implementation/json.ts index 1fe57d88ff5..173eadd13b9 100644 --- a/src/storage/implementation/json.ts +++ b/src/storage/implementation/json.ts @@ -19,7 +19,7 @@ import * as type from './type'; * Returns the Object resulting from parsing the given JSON, or null if the * given string does not represent a JSON object. */ -export function jsonObjectOrNull(s: string): {[name: string]: any}|null { +export function jsonObjectOrNull(s: string): { [name: string]: any } | null { let obj; try { obj = JSON.parse(s); diff --git a/src/storage/implementation/location.ts b/src/storage/implementation/location.ts index 7011a62ac76..b5a07ffa9c3 100644 --- a/src/storage/implementation/location.ts +++ b/src/storage/implementation/location.ts @@ -19,7 +19,7 @@ * object location. 
*/ import * as errorsExports from './error'; -import {errors} from './error'; +import { errors } from './error'; /** * @struct @@ -73,20 +73,25 @@ export class Location { let gsPath = '(/(.*))?$'; let path = '(/([^?#]*).*)?$'; let gsRegex = new RegExp('^gs://' + bucketDomain + gsPath, 'i'); - let gsIndices = {bucket: 1, path: 3}; + let gsIndices = { bucket: 1, path: 3 }; function httpModify(loc: Location) { loc.path_ = decodeURIComponent(loc.path); } let version = 'v[A-Za-z0-9_]+'; let httpRegex = new RegExp( - '^https?://firebasestorage\\.googleapis\\.com/' + version + '/b/' + - bucketDomain + '/o' + path, - 'i'); - let httpIndices = {bucket: 1, path: 3}; + '^https?://firebasestorage\\.googleapis\\.com/' + + version + + '/b/' + + bucketDomain + + '/o' + + path, + 'i' + ); + let httpIndices = { bucket: 1, path: 3 }; let groups = [ - {regex: gsRegex, indices: gsIndices, postModify: gsModify}, - {regex: httpRegex, indices: httpIndices, postModify: httpModify} + { regex: gsRegex, indices: gsIndices, postModify: gsModify }, + { regex: httpRegex, indices: httpIndices, postModify: httpModify } ]; for (let i = 0; i < groups.length; i++) { let group = groups[i]; diff --git a/src/storage/implementation/metadata.ts b/src/storage/implementation/metadata.ts index 04b13bb2c36..3fb2cad913d 100644 --- a/src/storage/implementation/metadata.ts +++ b/src/storage/implementation/metadata.ts @@ -17,17 +17,16 @@ /** * @fileoverview Documentation for the metadata format */ -import {Metadata} from '../metadata'; +import { Metadata } from '../metadata'; -import {AuthWrapper} from './authwrapper'; +import { AuthWrapper } from './authwrapper'; import * as json from './json'; -import {Location} from './location'; +import { Location } from './location'; import * as path from './path'; import * as type from './type'; import * as UrlUtils from './url'; -export function noXform_( - metadata: Metadata, value: any): any { +export function noXform_(metadata: Metadata, value: any): any { return 
value; } @@ -40,9 +39,11 @@ export class Mapping { xform: (p1: Metadata, p2: any) => any; constructor( - public server: string, opt_local?: string|null, opt_writable?: boolean, - opt_xform?: (p1: Metadata, p2: any) => any | - null) { + public server: string, + opt_local?: string | null, + opt_writable?: boolean, + opt_xform?: (p1: Metadata, p2: any) => any | null + ) { this.local = opt_local || server; this.writable = !!opt_writable; this.xform = opt_xform || noXform_; @@ -50,16 +51,16 @@ export class Mapping { } type Mappings = Mapping[]; -export {Mappings}; +export { Mappings }; -let mappings_: Mappings|null = null; +let mappings_: Mappings | null = null; export function xformPath(fullPath: any): string { let valid = type.isString(fullPath); if (!valid || fullPath.length < 2) { return fullPath; } else { - fullPath = (fullPath as string); + fullPath = fullPath as string; return path.lastComponent(fullPath); } } @@ -74,8 +75,7 @@ export function getMappings(): Mappings { mappings.push(new Mapping('metageneration')); mappings.push(new Mapping('name', 'fullPath', true)); - function mappingsXformPath( - metadata: Metadata, fullPath: any): string { + function mappingsXformPath(metadata: Metadata, fullPath: any): string { return xformPath(fullPath); } let nameMapping = new Mapping('name'); @@ -85,8 +85,7 @@ export function getMappings(): Mappings { /** * Coerces the second param to a number, if it is defined. */ - function xformSize( - metadata: Metadata, size: any): number|null|undefined { + function xformSize(metadata: Metadata, size: any): number | null | undefined { if (type.isDef(size)) { return +(size as number); } else { @@ -110,8 +109,7 @@ export function getMappings(): Mappings { * Transforms a comma-separated string of tokens into a list of download * URLs. 
*/ - function xformTokens( - metadata: Metadata, tokens: any): string[] { + function xformTokens(metadata: Metadata, tokens: any): string[] { let valid = type.isString(tokens) && tokens.length > 0; if (!valid) { // This can happen if objects are uploaded through GCS and retrieved @@ -125,13 +123,17 @@ export function getMappings(): Mappings { let path: string = metadata['fullPath'] as string; let urlPart = '/b/' + encode(bucket) + '/o/' + encode(path); let base = UrlUtils.makeDownloadUrl(urlPart); - let queryString = UrlUtils.makeQueryString({'alt': 'media', 'token': token}); + let queryString = UrlUtils.makeQueryString({ + alt: 'media', + token: token + }); return base + queryString; }); return urls; } mappings.push( - new Mapping('downloadTokens', 'downloadURLs', false, xformTokens)); + new Mapping('downloadTokens', 'downloadURLs', false, xformTokens) + ); mappings_ = mappings; return mappings_; } @@ -143,14 +145,16 @@ export function addRef(metadata: Metadata, authWrapper: AuthWrapper) { let loc = new Location(bucket, path); return authWrapper.makeStorageReference(loc); } - Object.defineProperty(metadata, 'ref', {get: generateRef}); + Object.defineProperty(metadata, 'ref', { get: generateRef }); } export function fromResource( - authWrapper: AuthWrapper, resource: {[name: string]: any}, - mappings: Mappings): Metadata { + authWrapper: AuthWrapper, + resource: { [name: string]: any }, + mappings: Mappings +): Metadata { let metadata: Metadata = {} as Metadata; - metadata['type'] = 'file'; + metadata['type'] = 'file'; let len = mappings.length; for (let i = 0; i < len; i++) { let mapping = mappings[i]; @@ -161,20 +165,24 @@ export function fromResource( } export function fromResourceString( - authWrapper: AuthWrapper, resourceString: string, - mappings: Mappings): Metadata|null { + authWrapper: AuthWrapper, + resourceString: string, + mappings: Mappings +): Metadata | null { let obj = json.jsonObjectOrNull(resourceString); if (obj === null) { return null; } - let 
resource = (obj as Metadata); + let resource = obj as Metadata; return fromResource(authWrapper, resource, mappings); } export function toResourceString( - metadata: Metadata, mappings: Mappings): string { + metadata: Metadata, + mappings: Mappings +): string { let resource: { - [prop: string]: any + [prop: string]: any; } = {}; let len = mappings.length; for (let i = 0; i < len; i++) { @@ -199,7 +207,7 @@ export function metadataValidator(p: any) { } } else { if (type.isNonNullObject(val)) { - throw 'Mapping for \'' + key + '\' cannot be an object.'; + throw "Mapping for '" + key + "' cannot be an object."; } } } diff --git a/src/storage/implementation/object.ts b/src/storage/implementation/object.ts index 54e79f12a0a..11f5b618c13 100644 --- a/src/storage/implementation/object.ts +++ b/src/storage/implementation/object.ts @@ -17,14 +17,14 @@ /** * @fileoverview Contains methods for working with objects. */ -export function contains( - obj: Object, prop: string): boolean { +export function contains(obj: Object, prop: string): boolean { return Object.prototype.hasOwnProperty.call(obj, prop); } export function forEach( - obj: {[key: string]: T}, - f: (p1: string, p2: T) => void) { + obj: { [key: string]: T }, + f: (p1: string, p2: T) => void +) { for (let key in obj) { if (contains(obj, key)) { f(key, obj[key]); @@ -32,12 +32,12 @@ export function forEach( } } -export function clone(obj?: {[key: string]: any}|null): T { +export function clone(obj?: { [key: string]: any } | null): T { if (obj == null) { return {} as T; } - let c: {[name: string]: any} = {}; + let c: { [name: string]: any } = {}; forEach(obj, function(key, val) { c[key] = val; }); diff --git a/src/storage/implementation/observer.ts b/src/storage/implementation/observer.ts index 3b240f35609..29ec4989009 100644 --- a/src/storage/implementation/observer.ts +++ b/src/storage/implementation/observer.ts @@ -20,12 +20,13 @@ type ErrorFn = (error: Error) => void; type CompleteFn = () => void; type Unsubscribe 
= () => void; -type Subscribe = - (next: NextFn | {[name: string]: string|null}, - error?: ErrorFn, - complete?: CompleteFn) => Unsubscribe; +type Subscribe = ( + next: NextFn | { [name: string]: string | null }, + error?: ErrorFn, + complete?: CompleteFn +) => Unsubscribe; -export {NextFn, ErrorFn, CompleteFn, Unsubscribe, Subscribe}; +export { NextFn, ErrorFn, CompleteFn, Unsubscribe, Subscribe }; /** * @struct @@ -36,13 +37,16 @@ export class Observer { complete: CompleteFn | null; constructor( - nextOrObserver: NextFn | {[name: string]: string|null} | null, - opt_error?: ErrorFn | null, - opt_complete?: CompleteFn | null) { - let asFunctions = type.isFunction(nextOrObserver) || - type.isDef(opt_error) || type.isDef(opt_complete); + nextOrObserver: NextFn | { [name: string]: string | null } | null, + opt_error?: ErrorFn | null, + opt_complete?: CompleteFn | null + ) { + let asFunctions = + type.isFunction(nextOrObserver) || + type.isDef(opt_error) || + type.isDef(opt_complete); if (asFunctions) { - this.next = nextOrObserver as (NextFn | null); + this.next = nextOrObserver as NextFn | null; this.error = opt_error || null; this.complete = opt_complete || null; } else { diff --git a/src/storage/implementation/path.ts b/src/storage/implementation/path.ts index b7477aaec8f..9ed338023fe 100644 --- a/src/storage/implementation/path.ts +++ b/src/storage/implementation/path.ts @@ -21,7 +21,7 @@ /** * @return Null if the path is already at the root. 
*/ -export function parent(path: string): string|null { +export function parent(path: string): string | null { if (path.length == 0) { return null; } @@ -34,11 +34,12 @@ export function parent(path: string): string|null { } export function child(path: string, childPath: string): string { - let canonicalChildPath = childPath.split('/') - .filter(function(component) { - return component.length > 0; - }) - .join('/'); + let canonicalChildPath = childPath + .split('/') + .filter(function(component) { + return component.length > 0; + }) + .join('/'); if (path.length === 0) { return canonicalChildPath; } else { diff --git a/src/storage/implementation/promise_external.ts b/src/storage/implementation/promise_external.ts index 6acaf0a5354..fd5e9562eea 100644 --- a/src/storage/implementation/promise_external.ts +++ b/src/storage/implementation/promise_external.ts @@ -25,10 +25,11 @@ * (function(!Error): void))} resolver */ -import { PromiseImpl } from "../../utils/promise"; +import { PromiseImpl } from '../../utils/promise'; -export function make(resolver: (p1: (p1: T) => void, - p2: (p1: Error) => void) => void): Promise { +export function make( + resolver: (p1: (p1: T) => void, p2: (p1: Error) => void) => void +): Promise { return new PromiseImpl(resolver); } @@ -36,9 +37,9 @@ export function make(resolver: (p1: (p1: T) => void, * @template T */ export function resolve(value: T): Promise { - return (PromiseImpl.resolve(value) as Promise); + return PromiseImpl.resolve(value) as Promise; } export function reject(error: Error): Promise { - return (PromiseImpl.reject(error) as Promise); + return PromiseImpl.reject(error) as Promise; } diff --git a/src/storage/implementation/request.ts b/src/storage/implementation/request.ts index 45af8bcb686..d4c81944044 100644 --- a/src/storage/implementation/request.ts +++ b/src/storage/implementation/request.ts @@ -21,17 +21,17 @@ import * as array from './array'; import * as backoff from './backoff'; import * as errorsExports from 
'./error'; -import {FirebaseStorageError} from './error'; -import {errors} from './error'; +import { FirebaseStorageError } from './error'; +import { errors } from './error'; import * as object from './object'; import * as promiseimpl from './promise_external'; -import {RequestInfo} from './requestinfo'; +import { RequestInfo } from './requestinfo'; import * as type from './type'; import * as UrlUtils from './url'; import * as XhrIoExports from './xhrio'; -import {Headers, XhrIo} from './xhrio'; -import {XhrIoPool} from './xhriopool'; -import { FirebaseNamespace } from "../../app/firebase_app"; +import { Headers, XhrIo } from './xhrio'; +import { XhrIoPool } from './xhriopool'; +import { FirebaseNamespace } from '../../app/firebase_app'; declare var firebase: FirebaseNamespace; @@ -59,31 +59,39 @@ class NetworkRequest implements Request { private url_: string; private method_: string; private headers_: Headers; - private body_: string|Blob|Uint8Array|null; + private body_: string | Blob | Uint8Array | null; private successCodes_: number[]; private additionalRetryCodes_: number[]; - private pendingXhr_: XhrIo|null = null; - private backoffId_: backoff.id|null = null; - private resolve_: Function|null = null; - private reject_: Function|null = null; + private pendingXhr_: XhrIo | null = null; + private backoffId_: backoff.id | null = null; + private resolve_: Function | null = null; + private reject_: Function | null = null; private canceled_: boolean = false; private appDelete_: boolean = false; private callback_: (p1: XhrIo, p2: string) => T; - private errorCallback_: ((p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError) | null; - private progressCallback_: - ((p1: number, p2: number) => void) | null; + private errorCallback_: + | ((p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError) + | null; + private progressCallback_: ((p1: number, p2: number) => void) | null; private timeout_: number; private pool_: XhrIoPool; promise_: Promise; 
constructor( - url: string, method: string, headers: Headers, - body: string|Blob|Uint8Array|null, successCodes: number[], - additionalRetryCodes: number[], - callback: (p1: XhrIo, p2: string) => T, - errorCallback: ((p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError) | null, timeout: number, - progressCallback: ((p1: number, p2: number) => void) | null, - pool: XhrIoPool) { + url: string, + method: string, + headers: Headers, + body: string | Blob | Uint8Array | null, + successCodes: number[], + additionalRetryCodes: number[], + callback: (p1: XhrIo, p2: string) => T, + errorCallback: + | ((p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError) + | null, + timeout: number, + progressCallback: ((p1: number, p2: number) => void) | null, + pool: XhrIoPool + ) { this.url_ = url; this.method_ = method; this.headers_ = headers; @@ -110,8 +118,9 @@ class NetworkRequest implements Request { let self = this; function doTheRequest( - backoffCallback: (p1: boolean, ...p2: any[]) => void, - canceled: boolean) { + backoffCallback: (p1: boolean, ...p2: any[]) => void, + canceled: boolean + ) { if (canceled) { backoffCallback(false, new RequestEndStatus(false, null, true)); return; @@ -129,26 +138,29 @@ class NetworkRequest implements Request { if (self.progressCallback_ !== null) { xhr.addUploadProgressListener(progressListener); } - xhr.send(self.url_, self.method_, self.body_, self.headers_) - .then(function(xhr: XhrIo) { - if (self.progressCallback_ !== null) { - xhr.removeUploadProgressListener(progressListener); - } - self.pendingXhr_ = null; - xhr = (xhr as XhrIo); - let hitServer = - xhr.getErrorCode() === XhrIoExports.ErrorCode.NO_ERROR; - let status = xhr.getStatus(); - if (!hitServer || self.isRetryStatusCode_(status)) { - let wasCanceled = - xhr.getErrorCode() === XhrIoExports.ErrorCode.ABORT; - backoffCallback( - false, new RequestEndStatus(false, null, wasCanceled)); - return; - } - let successCode = array.contains(self.successCodes_, status); - 
backoffCallback(true, new RequestEndStatus(successCode, xhr)); - }); + xhr + .send(self.url_, self.method_, self.body_, self.headers_) + .then(function(xhr: XhrIo) { + if (self.progressCallback_ !== null) { + xhr.removeUploadProgressListener(progressListener); + } + self.pendingXhr_ = null; + xhr = xhr as XhrIo; + let hitServer = + xhr.getErrorCode() === XhrIoExports.ErrorCode.NO_ERROR; + let status = xhr.getStatus(); + if (!hitServer || self.isRetryStatusCode_(status)) { + let wasCanceled = + xhr.getErrorCode() === XhrIoExports.ErrorCode.ABORT; + backoffCallback( + false, + new RequestEndStatus(false, null, wasCanceled) + ); + return; + } + let successCode = array.contains(self.successCodes_, status); + backoffCallback(true, new RequestEndStatus(successCode, xhr)); + }); } /** @@ -156,7 +168,9 @@ class NetworkRequest implements Request { * through, false if it hit the retry limit or was canceled. */ function backoffDone( - requestWentThrough: boolean, status: RequestEndStatus) { + requestWentThrough: boolean, + status: RequestEndStatus + ) { let resolve = self.resolve_ as Function; let reject = self.reject_ as Function; let xhr = status.xhr as XhrIo; @@ -182,8 +196,9 @@ class NetworkRequest implements Request { } } else { if (status.canceled) { - let err = self.appDelete_ ? errorsExports.appDeleted() : - errorsExports.canceled(); + let err = self.appDelete_ + ? 
errorsExports.appDeleted() + : errorsExports.canceled(); reject(err); } else { let err = errorsExports.retryLimitExceeded(); @@ -227,8 +242,10 @@ class NetworkRequest implements Request { 429 ]; let isExtraRetryCode = array.contains(extraRetryCodes, status); - let isRequestSpecificRetryCode = - array.contains(this.additionalRetryCodes_, status); + let isRequestSpecificRetryCode = array.contains( + this.additionalRetryCodes_, + status + ); return isFiveHundredCode || isExtraRetryCode || isRequestSpecificRetryCode; } } @@ -245,20 +262,23 @@ export class RequestEndStatus { canceled: boolean; constructor( - public wasSuccessCode: boolean, public xhr: XhrIo|null, - opt_canceled?: boolean) { + public wasSuccessCode: boolean, + public xhr: XhrIo | null, + opt_canceled?: boolean + ) { this.canceled = !!opt_canceled; } } -export function addAuthHeader_(headers: Headers, authToken: string|null) { +export function addAuthHeader_(headers: Headers, authToken: string | null) { if (authToken !== null && authToken.length > 0) { headers['Authorization'] = 'Firebase ' + authToken; } } export function addVersionHeader_(headers: Headers) { - let number = typeof firebase !== 'undefined' ? firebase.SDK_VERSION : 'AppManager'; + let number = + typeof firebase !== 'undefined' ? 
firebase.SDK_VERSION : 'AppManager'; headers['X-Firebase-Storage-Version'] = 'webjs/' + number; } @@ -266,16 +286,26 @@ export function addVersionHeader_(headers: Headers) { * @template T */ export function makeRequest( - requestInfo: RequestInfo, authToken: string|null, - pool: XhrIoPool): Request { + requestInfo: RequestInfo, + authToken: string | null, + pool: XhrIoPool +): Request { let queryPart = UrlUtils.makeQueryString(requestInfo.urlParams); let url = requestInfo.url + queryPart; let headers = object.clone(requestInfo.headers); addAuthHeader_(headers, authToken); addVersionHeader_(headers); return new NetworkRequest( - url, requestInfo.method, headers, requestInfo.body, - requestInfo.successCodes, requestInfo.additionalRetryCodes, - requestInfo.handler, requestInfo.errorHandler, requestInfo.timeout, - requestInfo.progressCallback, pool); + url, + requestInfo.method, + headers, + requestInfo.body, + requestInfo.successCodes, + requestInfo.additionalRetryCodes, + requestInfo.handler, + requestInfo.errorHandler, + requestInfo.timeout, + requestInfo.progressCallback, + pool + ); } diff --git a/src/storage/implementation/requestinfo.ts b/src/storage/implementation/requestinfo.ts index e92ea11ce64..e16294e119a 100644 --- a/src/storage/implementation/requestinfo.ts +++ b/src/storage/implementation/requestinfo.ts @@ -13,37 +13,39 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {FirebaseStorageError} from './error'; -import {Headers, XhrIo} from './xhrio'; +import { FirebaseStorageError } from './error'; +import { Headers, XhrIo } from './xhrio'; -export type UrlParams = {[name: string]: string}; +export type UrlParams = { [name: string]: string }; export class RequestInfo { urlParams: UrlParams = {}; headers: Headers = {}; - body: Blob|string|Uint8Array|null = null; + body: Blob | string | Uint8Array | null = null; - errorHandler: ((p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError) | null = null; + errorHandler: + | ((p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError) + | null = null; /** * Called with the current number of bytes uploaded and total size (-1 if not * computable) of the request body (i.e. used to report upload progress). */ - progressCallback: - ((p1: number, p2: number) => void) | null = null; + progressCallback: ((p1: number, p2: number) => void) | null = null; successCodes: number[] = [200]; additionalRetryCodes: number[] = []; constructor( - public url: string, - public method: string, - /** + public url: string, + public method: string, + /** * Returns the value with which to resolve the request's promise. Only called * if the request is successful. Throw from this function to reject the * returned Request's promise with the thrown error. * Note: The XhrIo passed to this function may be reused after this callback * returns. Do not keep a reference to it in any way. */ - public handler: (p1: XhrIo, p2: string) => T, - public timeout: number) {} + public handler: (p1: XhrIo, p2: string) => T, + public timeout: number + ) {} } diff --git a/src/storage/implementation/requestmaker.ts b/src/storage/implementation/requestmaker.ts index ce7048ab523..7ee19998840 100644 --- a/src/storage/implementation/requestmaker.ts +++ b/src/storage/implementation/requestmaker.ts @@ -13,11 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {Request} from './request'; -import {RequestInfo} from './requestinfo'; -import {XhrIoPool} from './xhriopool'; +import { Request } from './request'; +import { RequestInfo } from './requestinfo'; +import { XhrIoPool } from './xhriopool'; -type requestMaker = - (requestInfo: RequestInfo, authToken: string|null, pool: XhrIoPool) => Request; +type requestMaker = ( + requestInfo: RequestInfo, + authToken: string | null, + pool: XhrIoPool +) => Request; -export {requestMaker}; +export { requestMaker }; diff --git a/src/storage/implementation/requestmap.ts b/src/storage/implementation/requestmap.ts index 22a8685312a..ff22fcac358 100644 --- a/src/storage/implementation/requestmap.ts +++ b/src/storage/implementation/requestmap.ts @@ -15,14 +15,14 @@ */ import * as object from './object'; import * as RequestExports from './request'; -import {Request} from './request'; +import { Request } from './request'; import * as constants from './constants'; /** * @struct */ export class RequestMap { - private map_: {[key: number]: Request} = {}; + private map_: { [key: number]: Request } = {}; private id_: number; constructor() { diff --git a/src/storage/implementation/requests.ts b/src/storage/implementation/requests.ts index 0ab5faab19b..472595eab8c 100644 --- a/src/storage/implementation/requests.ts +++ b/src/storage/implementation/requests.ts @@ -18,21 +18,21 @@ * @fileoverview Defines methods for interacting with the network. 
*/ -import {Metadata} from '../metadata'; +import { Metadata } from '../metadata'; import * as array from './array'; -import {AuthWrapper} from './authwrapper'; -import {FbsBlob} from './blob'; +import { AuthWrapper } from './authwrapper'; +import { FbsBlob } from './blob'; import * as errorsExports from './error'; -import {FirebaseStorageError} from './error'; -import {errors} from './error'; -import {Location} from './location'; +import { FirebaseStorageError } from './error'; +import { errors } from './error'; +import { Location } from './location'; import * as MetadataUtils from './metadata'; import * as object from './object'; -import {RequestInfo} from './requestinfo'; +import { RequestInfo } from './requestinfo'; import * as type from './type'; import * as UrlUtils from './url'; -import {XhrIo} from './xhrio'; +import { XhrIo } from './xhrio'; /** * Throws the UNKNOWN FirebaseStorageError if cndn is false. @@ -44,19 +44,28 @@ export function handlerCheck(cndn: boolean) { } export function metadataHandler( - authWrapper: AuthWrapper, - mappings: MetadataUtils.Mappings): (p1: XhrIo, p2: string) => Metadata { + authWrapper: AuthWrapper, + mappings: MetadataUtils.Mappings +): (p1: XhrIo, p2: string) => Metadata { function handler(xhr: XhrIo, text: string): Metadata { - let metadata = MetadataUtils.fromResourceString(authWrapper, text, mappings); + let metadata = MetadataUtils.fromResourceString( + authWrapper, + text, + mappings + ); handlerCheck(metadata !== null); return metadata as Metadata; } return handler; } -export function sharedErrorHandler(location: Location): ( - p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError { - function errorHandler(xhr: XhrIo, err: FirebaseStorageError): FirebaseStorageError { +export function sharedErrorHandler( + location: Location +): (p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError { + function errorHandler( + xhr: XhrIo, + err: FirebaseStorageError + ): FirebaseStorageError { let newErr; if 
(xhr.getStatus() === 401) { newErr = errorsExports.unauthenticated(); @@ -77,11 +86,15 @@ export function sharedErrorHandler(location: Location): ( return errorHandler; } -export function objectErrorHandler(location: Location): ( - p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError { +export function objectErrorHandler( + location: Location +): (p1: XhrIo, p2: FirebaseStorageError) => FirebaseStorageError { let shared = sharedErrorHandler(location); - function errorHandler(xhr: XhrIo, err: FirebaseStorageError): FirebaseStorageError { + function errorHandler( + xhr: XhrIo, + err: FirebaseStorageError + ): FirebaseStorageError { let newErr = shared(xhr, err); if (xhr.getStatus() === 404) { newErr = errorsExports.objectNotFound(location.path); @@ -93,29 +106,42 @@ export function objectErrorHandler(location: Location): ( } export function getMetadata( - authWrapper: AuthWrapper, location: Location, - mappings: MetadataUtils.Mappings): RequestInfo { + authWrapper: AuthWrapper, + location: Location, + mappings: MetadataUtils.Mappings +): RequestInfo { let urlPart = location.fullServerUrl(); let url = UrlUtils.makeNormalUrl(urlPart); let method = 'GET'; let timeout = authWrapper.maxOperationRetryTime(); let requestInfo = new RequestInfo( - url, method, metadataHandler(authWrapper, mappings), timeout); + url, + method, + metadataHandler(authWrapper, mappings), + timeout + ); requestInfo.errorHandler = objectErrorHandler(location); return requestInfo; } export function updateMetadata( - authWrapper: AuthWrapper, location: Location, metadata: Metadata, - mappings: MetadataUtils.Mappings): RequestInfo { + authWrapper: AuthWrapper, + location: Location, + metadata: Metadata, + mappings: MetadataUtils.Mappings +): RequestInfo { let urlPart = location.fullServerUrl(); let url = UrlUtils.makeNormalUrl(urlPart); let method = 'PATCH'; let body = MetadataUtils.toResourceString(metadata, mappings); - let headers = {'Content-Type': 'application/json; charset=utf-8'}; + let 
headers = { 'Content-Type': 'application/json; charset=utf-8' }; let timeout = authWrapper.maxOperationRetryTime(); let requestInfo = new RequestInfo( - url, method, metadataHandler(authWrapper, mappings), timeout); + url, + method, + metadataHandler(authWrapper, mappings), + timeout + ); requestInfo.headers = headers; requestInfo.body = body; requestInfo.errorHandler = objectErrorHandler(location); @@ -123,7 +149,9 @@ export function updateMetadata( } export function deleteObject( - authWrapper: AuthWrapper, location: Location): RequestInfo { + authWrapper: AuthWrapper, + location: Location +): RequestInfo { let urlPart = location.fullServerUrl(); let url = UrlUtils.makeNormalUrl(urlPart); let method = 'DELETE'; @@ -137,13 +165,21 @@ export function deleteObject( } export function determineContentType_( - metadata: Metadata|null, blob: FbsBlob|null): string { - return metadata && metadata['contentType'] || blob && blob.type() || - 'application/octet-stream'; + metadata: Metadata | null, + blob: FbsBlob | null +): string { + return ( + (metadata && metadata['contentType']) || + (blob && blob.type()) || + 'application/octet-stream' + ); } export function metadataForUpload_( - location: Location, blob: FbsBlob, opt_metadata?: Metadata|null): Metadata { + location: Location, + blob: FbsBlob, + opt_metadata?: Metadata | null +): Metadata { let metadata = object.clone(opt_metadata); metadata['fullPath'] = location.path; metadata['size'] = blob.size(); @@ -154,10 +190,16 @@ export function metadataForUpload_( } export function multipartUpload( - authWrapper: AuthWrapper, location: Location, mappings: MetadataUtils.Mappings, - blob: FbsBlob, opt_metadata?: Metadata|null): RequestInfo { + authWrapper: AuthWrapper, + location: Location, + mappings: MetadataUtils.Mappings, + blob: FbsBlob, + opt_metadata?: Metadata | null +): RequestInfo { let urlPart = location.bucketOnlyServerUrl(); - let headers: { [prop: string]: string } = {'X-Goog-Upload-Protocol': 'multipart'}; + let 
headers: { [prop: string]: string } = { + 'X-Goog-Upload-Protocol': 'multipart' + }; function genBoundary() { let str = ''; @@ -170,21 +212,33 @@ export function multipartUpload( headers['Content-Type'] = 'multipart/related; boundary=' + boundary; let metadata = metadataForUpload_(location, blob, opt_metadata); let metadataString = MetadataUtils.toResourceString(metadata, mappings); - let preBlobPart = '--' + boundary + '\r\n' + - 'Content-Type: application/json; charset=utf-8\r\n\r\n' + metadataString + - '\r\n--' + boundary + '\r\n' + - 'Content-Type: ' + metadata['contentType'] + '\r\n\r\n'; + let preBlobPart = + '--' + + boundary + + '\r\n' + + 'Content-Type: application/json; charset=utf-8\r\n\r\n' + + metadataString + + '\r\n--' + + boundary + + '\r\n' + + 'Content-Type: ' + + metadata['contentType'] + + '\r\n\r\n'; let postBlobPart = '\r\n--' + boundary + '--'; let body = FbsBlob.getBlob(preBlobPart, blob, postBlobPart); if (body === null) { throw errorsExports.cannotSliceBlob(); } - let urlParams = {'name': metadata['fullPath']}; + let urlParams = { name: metadata['fullPath'] }; let url = UrlUtils.makeUploadUrl(urlPart); let method = 'POST'; let timeout = authWrapper.maxUploadRetryTime(); let requestInfo = new RequestInfo( - url, method, metadataHandler(authWrapper, mappings), timeout); + url, + method, + metadataHandler(authWrapper, mappings), + timeout + ); requestInfo.urlParams = urlParams; requestInfo.headers = headers; requestInfo.body = body.uploadData(); @@ -202,13 +256,14 @@ export function multipartUpload( */ export class ResumableUploadStatus { finalized: boolean; - metadata: Metadata|null; + metadata: Metadata | null; constructor( - public current: number, - public total: number, - finalized?: boolean, - metadata?: Metadata|null) { + public current: number, + public total: number, + finalized?: boolean, + metadata?: Metadata | null + ) { this.finalized = !!finalized; this.metadata = metadata || null; } @@ -223,15 +278,19 @@ export function 
checkResumeHeader_(xhr: XhrIo, opt_allowed?: string[]): string { } let allowed = opt_allowed || ['active']; handlerCheck(array.contains(allowed, status)); - return (status as string); + return status as string; } export function createResumableUpload( - authWrapper: AuthWrapper, location: Location, mappings: MetadataUtils.Mappings, - blob: FbsBlob, opt_metadata?: Metadata|null): RequestInfo { + authWrapper: AuthWrapper, + location: Location, + mappings: MetadataUtils.Mappings, + blob: FbsBlob, + opt_metadata?: Metadata | null +): RequestInfo { let urlPart = location.bucketOnlyServerUrl(); let metadata = metadataForUpload_(location, blob, opt_metadata); - let urlParams = {'name': metadata['fullPath']}; + let urlParams = { name: metadata['fullPath'] }; let url = UrlUtils.makeUploadUrl(urlPart); let method = 'POST'; let headers = { @@ -253,7 +312,7 @@ export function createResumableUpload( handlerCheck(false); } handlerCheck(type.isString(url)); - return (url as string); + return url as string; } let requestInfo = new RequestInfo(url, method, handler, timeout); requestInfo.urlParams = urlParams; @@ -267,9 +326,12 @@ export function createResumableUpload( * @param url From a call to fbs.requests.createResumableUpload. */ export function getResumableUploadStatus( - authWrapper: AuthWrapper, location: Location, url: string, - blob: FbsBlob): RequestInfo { - let headers = {'X-Goog-Upload-Command': 'query'}; + authWrapper: AuthWrapper, + location: Location, + url: string, + blob: FbsBlob +): RequestInfo { + let headers = { 'X-Goog-Upload-Command': 'query' }; function handler(xhr: XhrIo, text: string): ResumableUploadStatus { let status = checkResumeHeader_(xhr, ['active', 'final']); @@ -307,10 +369,15 @@ export const resumableUploadChunkSize: number = 256 * 1024; * for upload. 
*/ export function continueResumableUpload( - location: Location, authWrapper: AuthWrapper, url: string, blob: FbsBlob, - chunkSize: number, mappings: MetadataUtils.Mappings, - opt_status?: ResumableUploadStatus|null, - opt_progressCallback?: ((p1: number, p2: number) => void) | null): RequestInfo { + location: Location, + authWrapper: AuthWrapper, + url: string, + blob: FbsBlob, + chunkSize: number, + mappings: MetadataUtils.Mappings, + opt_status?: ResumableUploadStatus | null, + opt_progressCallback?: ((p1: number, p2: number) => void) | null +): RequestInfo { // TODO(andysoto): standardize on internal asserts // assert(!(opt_status && opt_status.finalized)); let status = new ResumableUploadStatus(0, 0); @@ -332,7 +399,7 @@ export function continueResumableUpload( let startByte = status.current; let endByte = startByte + bytesToUpload; let uploadCommand = - bytesToUpload === bytesLeft ? 'upload, finalize' : 'upload'; + bytesToUpload === bytesLeft ? 'upload, finalize' : 'upload'; let headers = { 'X-Goog-Upload-Command': uploadCommand, 'X-Goog-Upload-Offset': status.current @@ -357,7 +424,11 @@ export function continueResumableUpload( metadata = null; } return new ResumableUploadStatus( - newCurrent, size, uploadStatus === 'final', metadata); + newCurrent, + size, + uploadStatus === 'final', + metadata + ); } let method = 'POST'; let timeout = authWrapper.maxUploadRetryTime(); diff --git a/src/storage/implementation/string.ts b/src/storage/implementation/string.ts index 45b436ee589..3161e24bbdf 100644 --- a/src/storage/implementation/string.ts +++ b/src/storage/implementation/string.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ import * as errorsExports from './error'; -import {errors} from './error'; +import { errors } from './error'; /** * @enum {string} @@ -35,9 +35,15 @@ export function formatValidator(stringFormat: string) { case StringFormat.DATA_URL: return; default: - throw 'Expected one of the event types: [' + StringFormat.RAW + ', ' + - StringFormat.BASE64 + ', ' + StringFormat.BASE64URL + ', ' + - StringFormat.DATA_URL + '].'; + throw 'Expected one of the event types: [' + + StringFormat.RAW + + ', ' + + StringFormat.BASE64 + + ', ' + + StringFormat.BASE64URL + + ', ' + + StringFormat.DATA_URL + + '].'; } } @@ -45,15 +51,17 @@ export function formatValidator(stringFormat: string) { * @struct */ export class StringData { - contentType: string|null; + contentType: string | null; - constructor(public data: Uint8Array, opt_contentType?: string|null) { + constructor(public data: Uint8Array, opt_contentType?: string | null) { this.contentType = opt_contentType || null; } } export function dataFromString( - format: StringFormat, string: string): StringData { + format: StringFormat, + string: string +): StringData { switch (format) { case StringFormat.RAW: return new StringData(utf8Bytes_(string)); @@ -76,29 +84,33 @@ export function utf8Bytes_(string: string): Uint8Array { b.push(c); } else { if (c <= 2047) { - b.push(192 | c >> 6, 128 | c & 63); + b.push(192 | (c >> 6), 128 | (c & 63)); } else { if ((c & 64512) == 55296) { // The start of a surrogate pair. - let valid = i < string.length - 1 && - (string.charCodeAt(i + 1) & 64512) == 56320; + let valid = + i < string.length - 1 && + (string.charCodeAt(i + 1) & 64512) == 56320; if (!valid) { // The second surrogate wasn't there. 
b.push(239, 191, 189); } else { let hi = c; let lo = string.charCodeAt(++i); - c = 65536 | (hi & 1023) << 10 | lo & 1023; + c = 65536 | ((hi & 1023) << 10) | (lo & 1023); b.push( - 240 | c >> 18, 128 | c >> 12 & 63, 128 | c >> 6 & 63, - 128 | c & 63); + 240 | (c >> 18), + 128 | ((c >> 12) & 63), + 128 | ((c >> 6) & 63), + 128 | (c & 63) + ); } } else { if ((c & 64512) == 56320) { // Invalid low surrogate. b.push(239, 191, 189); } else { - b.push(224 | c >> 12, 128 | c >> 6 & 63, 128 | c & 63); + b.push(224 | (c >> 12), 128 | ((c >> 6) & 63), 128 | (c & 63)); } } } @@ -113,7 +125,9 @@ export function percentEncodedBytes_(string: string): Uint8Array { decoded = decodeURIComponent(string); } catch (e) { throw errorsExports.invalidFormat( - StringFormat.DATA_URL, 'Malformed data URL.'); + StringFormat.DATA_URL, + 'Malformed data URL.' + ); } return utf8Bytes_(decoded); } @@ -126,9 +140,11 @@ export function base64Bytes_(format: StringFormat, string: string): Uint8Array { if (hasMinus || hasUnder) { let invalidChar = hasMinus ? '-' : '_'; throw errorsExports.invalidFormat( - format, - 'Invalid character \'' + invalidChar + - '\' found: is it base64url encoded?'); + format, + "Invalid character '" + + invalidChar + + "' found: is it base64url encoded?" + ); } break; } @@ -138,9 +154,9 @@ export function base64Bytes_(format: StringFormat, string: string): Uint8Array { if (hasPlus || hasSlash) { let invalidChar = hasPlus ? '+' : '/'; throw errorsExports.invalidFormat( - format, - 'Invalid character \'' + invalidChar + - '\' found: is it base64 encoded?'); + format, + "Invalid character '" + invalidChar + "' found: is it base64 encoded?" 
+ ); } string = string.replace(/-/g, '+').replace(/_/g, '/'); break; @@ -164,22 +180,23 @@ export function base64Bytes_(format: StringFormat, string: string): Uint8Array { */ class DataURLParts { base64: boolean = false; - contentType: string|null = null; + contentType: string | null = null; rest: string; constructor(dataURL: string) { let matches = dataURL.match(/^data:([^,]+)?,/); if (matches === null) { throw errorsExports.invalidFormat( - StringFormat.DATA_URL, - 'Must be formatted \'data:[][;base64],'); + StringFormat.DATA_URL, + "Must be formatted 'data:[][;base64]," + ); } let middle = matches[1] || null; if (middle != null) { this.base64 = endsWith(middle, ';base64'); - this.contentType = this.base64 ? - middle.substring(0, middle.length - ';base64'.length) : - middle; + this.contentType = this.base64 + ? middle.substring(0, middle.length - ';base64'.length) + : middle; } this.rest = dataURL.substring(dataURL.indexOf(',') + 1); } @@ -194,7 +211,7 @@ export function dataURLBytes_(string: string): Uint8Array { } } -export function dataURLContentType_(string: string): string|null { +export function dataURLContentType_(string: string): string | null { let parts = new DataURLParts(string); return parts.contentType; } diff --git a/src/storage/implementation/taskenums.ts b/src/storage/implementation/taskenums.ts index b786fdba2cc..67b3d76b92f 100644 --- a/src/storage/implementation/taskenums.ts +++ b/src/storage/implementation/taskenums.ts @@ -61,8 +61,9 @@ export const TaskState = { ERROR: 'error' }; -export function taskStateFromInternalTaskState(state: InternalTaskState): - TaskState { +export function taskStateFromInternalTaskState( + state: InternalTaskState +): TaskState { switch (state) { case InternalTaskState.RUNNING: case InternalTaskState.PAUSING: @@ -77,7 +78,6 @@ export function taskStateFromInternalTaskState(state: InternalTaskState): case InternalTaskState.ERROR: return TaskState.ERROR; default: - // TODO(andysoto): assert(false); return 
TaskState.ERROR; } diff --git a/src/storage/implementation/type.ts b/src/storage/implementation/type.ts index 1d2fdbff98a..f4b5bec49f6 100644 --- a/src/storage/implementation/type.ts +++ b/src/storage/implementation/type.ts @@ -56,4 +56,3 @@ export function isNativeBlob(p: any): boolean { export function isNativeBlobDefined(): boolean { return typeof Blob !== 'undefined'; } - diff --git a/src/storage/implementation/xhrio.ts b/src/storage/implementation/xhrio.ts index 75c93bb4a9c..6e29fc0639a 100644 --- a/src/storage/implementation/xhrio.ts +++ b/src/storage/implementation/xhrio.ts @@ -19,12 +19,15 @@ * goog.net.XhrIo-like interface. */ -export type Headers = {[name: string]: (string|number)}; +export type Headers = { [name: string]: string | number }; export interface XhrIo { send( - url: string, method: string, opt_body?: ArrayBufferView|Blob|string|null, - opt_headers?: Headers): Promise; + url: string, + method: string, + opt_body?: ArrayBufferView | Blob | string | null, + opt_headers?: Headers + ): Promise; getErrorCode(): ErrorCode; @@ -37,7 +40,7 @@ export interface XhrIo { */ abort(): void; - getResponseHeader(header: string): string|null; + getResponseHeader(header: string): string | null; addUploadProgressListener(listener: (p1: Event) => void): void; diff --git a/src/storage/implementation/xhrio_network.ts b/src/storage/implementation/xhrio_network.ts index 1fca54c4b9b..3f7faf02179 100644 --- a/src/storage/implementation/xhrio_network.ts +++ b/src/storage/implementation/xhrio_network.ts @@ -18,7 +18,7 @@ import * as object from './object'; import * as promiseimpl from './promise_external'; import * as type from './type'; import * as XhrIoExports from './xhrio'; -import {Headers, XhrIo} from './xhrio'; +import { Headers, XhrIo } from './xhrio'; /** * We use this instead of goog.net.XhrIo because goog.net.XhrIo is hyuuuuge and @@ -34,15 +34,15 @@ export class NetworkXhrIo implements XhrIo { this.xhr_ = new XMLHttpRequest(); this.errorCode_ = 
XhrIoExports.ErrorCode.NO_ERROR; this.sendPromise_ = promiseimpl.make((resolve, reject) => { - this.xhr_.addEventListener('abort', (event) => { + this.xhr_.addEventListener('abort', event => { this.errorCode_ = XhrIoExports.ErrorCode.ABORT; resolve(this); }); - this.xhr_.addEventListener('error', (event) => { + this.xhr_.addEventListener('error', event => { this.errorCode_ = XhrIoExports.ErrorCode.NETWORK_ERROR; resolve(this); }); - this.xhr_.addEventListener('load', (event) => { + this.xhr_.addEventListener('load', event => { resolve(this); }); }); @@ -52,15 +52,18 @@ export class NetworkXhrIo implements XhrIo { * @override */ send( - url: string, method: string, opt_body?: ArrayBufferView|Blob|string|null, - opt_headers?: Headers): Promise { + url: string, + method: string, + opt_body?: ArrayBufferView | Blob | string | null, + opt_headers?: Headers + ): Promise { if (this.sent_) { throw errorsExports.internalError('cannot .send() more than once'); } this.sent_ = true; this.xhr_.open(method, url, true); if (type.isDef(opt_headers)) { - const headers = (opt_headers as Headers); + const headers = opt_headers as Headers; object.forEach(headers, (key, val) => { this.xhr_.setRequestHeader(key, val.toString()); }); @@ -79,7 +82,8 @@ export class NetworkXhrIo implements XhrIo { getErrorCode(): XhrIoExports.ErrorCode { if (!this.sent_) { throw errorsExports.internalError( - 'cannot .getErrorCode() before sending'); + 'cannot .getErrorCode() before sending' + ); } return this.errorCode_; } @@ -104,7 +108,8 @@ export class NetworkXhrIo implements XhrIo { getResponseText(): string { if (!this.sent_) { throw errorsExports.internalError( - 'cannot .getResponseText() before sending'); + 'cannot .getResponseText() before sending' + ); } return this.xhr_.responseText; } @@ -120,7 +125,7 @@ export class NetworkXhrIo implements XhrIo { /** * @override */ - getResponseHeader(header: string): string|null { + getResponseHeader(header: string): string | null { return 
this.xhr_.getResponseHeader(header); } diff --git a/src/storage/implementation/xhriopool.ts b/src/storage/implementation/xhriopool.ts index b04130a9075..3724b9eee24 100644 --- a/src/storage/implementation/xhriopool.ts +++ b/src/storage/implementation/xhriopool.ts @@ -17,8 +17,8 @@ /** * @fileoverview Replacement for goog.net.XhrIoPool that works with fbs.XhrIo. */ -import {XhrIo} from './xhrio'; -import {NetworkXhrIo} from './xhrio_network'; +import { XhrIo } from './xhrio'; +import { NetworkXhrIo } from './xhrio_network'; /** * Factory-like class for creating XhrIo instances. diff --git a/src/storage/metadata.ts b/src/storage/metadata.ts index 7046b0d6dc8..b8a62c8e69e 100644 --- a/src/storage/metadata.ts +++ b/src/storage/metadata.ts @@ -13,33 +13,33 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import {Reference} from './reference'; +import { Reference } from './reference'; /** * @fileoverview Documentation for the metadata format. 
*/ type Metadata = { - bucket: string|undefined, - generation: string|undefined, - metageneration: string|undefined, - fullPath: string|undefined, - name: string|undefined, - size: number|undefined, - type: string|undefined, - timeCreated: string|undefined, - updated: string|undefined, - md5Hash: string|undefined, - cacheControl: string|undefined, - contentDisposition: string|undefined, - contentEncoding: string|undefined, - contentLanguage: string|undefined, - contentType: string|undefined, - downloadURLs: string[]|undefined, - downloadTokens: string[]|undefined, - customMetadata: {[key: string]: string}|undefined, - ref: Reference|undefined + bucket: string | undefined; + generation: string | undefined; + metageneration: string | undefined; + fullPath: string | undefined; + name: string | undefined; + size: number | undefined; + type: string | undefined; + timeCreated: string | undefined; + updated: string | undefined; + md5Hash: string | undefined; + cacheControl: string | undefined; + contentDisposition: string | undefined; + contentEncoding: string | undefined; + contentLanguage: string | undefined; + contentType: string | undefined; + downloadURLs: string[] | undefined; + downloadTokens: string[] | undefined; + customMetadata: { [key: string]: string } | undefined; + ref: Reference | undefined; [prop: string]: any; }; -export {Metadata}; +export { Metadata }; diff --git a/src/storage/reference.ts b/src/storage/reference.ts index be16322dd57..62561eaf8fc 100644 --- a/src/storage/reference.ts +++ b/src/storage/reference.ts @@ -18,21 +18,21 @@ * @fileoverview Defines the Firebase Storage Reference class. 
*/ import * as args from './implementation/args'; -import {AuthWrapper} from './implementation/authwrapper'; -import {FbsBlob} from './implementation/blob'; +import { AuthWrapper } from './implementation/authwrapper'; +import { FbsBlob } from './implementation/blob'; import * as errorsExports from './implementation/error'; -import {errors} from './implementation/error'; -import {Location} from './implementation/location'; +import { errors } from './implementation/error'; +import { Location } from './implementation/location'; import * as metadata from './implementation/metadata'; import * as object from './implementation/object'; import * as path from './implementation/path'; import * as requests from './implementation/requests'; import * as fbsString from './implementation/string'; -import {StringFormat} from './implementation/string'; +import { StringFormat } from './implementation/string'; import * as type from './implementation/type'; -import {Metadata} from './metadata'; -import {Service} from './service'; -import {UploadTask} from './task'; +import { Metadata } from './metadata'; +import { Service } from './service'; +import { UploadTask } from './task'; /** * Provides methods to interact with a bucket in the Firebase Storage service. @@ -48,7 +48,7 @@ import {UploadTask} from './task'; export class Reference { protected location: Location; - constructor(protected authWrapper: AuthWrapper, location: string|Location) { + constructor(protected authWrapper: AuthWrapper, location: string | Location) { if (location instanceof Location) { this.location = location; } else { @@ -90,7 +90,7 @@ export class Reference { * @return A reference to the parent of the * current object, or null if the current object is the root. 
*/ - get parent(): Reference|null { + get parent(): Reference | null { let newPath = path.parent(this.location.path); if (newPath === null) { return null; @@ -130,13 +130,24 @@ export class Reference { * @return An UploadTask that lets you control and * observe the upload. */ - put(data: Blob|Uint8Array|ArrayBuffer, metadata: Metadata|null = null): UploadTask { + put( + data: Blob | Uint8Array | ArrayBuffer, + metadata: Metadata | null = null + ): UploadTask { args.validate( - 'put', [args.uploadDataSpec(), args.metadataSpec(true)], arguments); + 'put', + [args.uploadDataSpec(), args.metadataSpec(true)], + arguments + ); this.throwIfRoot_('put'); return new UploadTask( - this, this.authWrapper, this.location, this.mappings(), new FbsBlob(data), - metadata); + this, + this.authWrapper, + this.location, + this.mappings(), + new FbsBlob(data), + metadata + ); } /** @@ -146,15 +157,20 @@ export class Reference { * @return An UploadTask that lets you control and * observe the upload. */ - putString(string: string, format: StringFormat = StringFormat.RAW, opt_metadata?: Metadata): - UploadTask { + putString( + string: string, + format: StringFormat = StringFormat.RAW, + opt_metadata?: Metadata + ): UploadTask { args.validate( - 'putString', - [ - args.stringSpec(), args.stringSpec(fbsString.formatValidator, true), - args.metadataSpec(true) - ], - arguments); + 'putString', + [ + args.stringSpec(), + args.stringSpec(fbsString.formatValidator, true), + args.metadataSpec(true) + ], + arguments + ); this.throwIfRoot_('putString'); let data = fbsString.dataFromString(format, string); let metadata = object.clone(opt_metadata); @@ -162,8 +178,13 @@ export class Reference { metadata['contentType'] = data.contentType; } return new UploadTask( - this, this.authWrapper, this.location, this.mappings(), - new FbsBlob(data.data, true), metadata); + this, + this.authWrapper, + this.location, + this.mappings(), + new FbsBlob(data.data, true), + metadata + ); } /** @@ -191,7 +212,10 @@ 
export class Reference { let self = this; return this.authWrapper.getAuthToken().then(function(authToken) { let requestInfo = requests.getMetadata( - self.authWrapper, self.location, self.mappings()); + self.authWrapper, + self.location, + self.mappings() + ); return self.authWrapper.makeRequest(requestInfo, authToken).getPromise(); }); } @@ -211,7 +235,11 @@ export class Reference { let self = this; return this.authWrapper.getAuthToken().then(function(authToken) { let requestInfo = requests.updateMetadata( - self.authWrapper, self.location, metadata, self.mappings()); + self.authWrapper, + self.location, + metadata, + self.mappings() + ); return self.authWrapper.makeRequest(requestInfo, authToken).getPromise(); }); } diff --git a/src/storage/service.ts b/src/storage/service.ts index 5630fcfe63d..547605f17b7 100644 --- a/src/storage/service.ts +++ b/src/storage/service.ts @@ -14,14 +14,14 @@ * limitations under the License. */ import * as args from './implementation/args'; -import {AuthWrapper} from './implementation/authwrapper'; -import {Location} from './implementation/location'; +import { AuthWrapper } from './implementation/authwrapper'; +import { Location } from './implementation/location'; import * as fbsPromiseImpl from './implementation/promise_external'; import * as RequestExports from './implementation/request'; -import {Request} from './implementation/request'; -import {XhrIoPool} from './implementation/xhriopool'; -import {Reference} from './reference'; -import { FirebaseApp } from "../app/firebase_app"; +import { Request } from './implementation/request'; +import { XhrIoPool } from './implementation/xhriopool'; +import { Reference } from './reference'; +import { FirebaseApp } from '../app/firebase_app'; /** * A service that provides firebaseStorage.Reference instances. 
@@ -32,15 +32,20 @@ import { FirebaseApp } from "../app/firebase_app"; export class Service { authWrapper_: AuthWrapper; private app_: FirebaseApp; - private bucket_: Location|null = null; + private bucket_: Location | null = null; private internals_: ServiceInternals; constructor(app: FirebaseApp, pool: XhrIoPool, url?: string) { function maker(authWrapper: AuthWrapper, loc: Location) { return new Reference(authWrapper, loc); } - this.authWrapper_ = - new AuthWrapper(app, maker, RequestExports.makeRequest, this, pool); + this.authWrapper_ = new AuthWrapper( + app, + maker, + RequestExports.makeRequest, + this, + pool + ); this.app_ = app; if (url != null) { this.bucket_ = Location.makeFromBucketSpec(url); @@ -101,7 +106,10 @@ export class Service { setMaxUploadRetryTime(time: number) { args.validate( - 'setMaxUploadRetryTime', [args.nonNegativeNumberSpec()], arguments); + 'setMaxUploadRetryTime', + [args.nonNegativeNumberSpec()], + arguments + ); this.authWrapper_.setMaxUploadRetryTime(time); } @@ -111,7 +119,10 @@ export class Service { setMaxOperationRetryTime(time: number) { args.validate( - 'setMaxOperationRetryTime', [args.nonNegativeNumberSpec()], arguments); + 'setMaxOperationRetryTime', + [args.nonNegativeNumberSpec()], + arguments + ); this.authWrapper_.setMaxOperationRetryTime(time); } diff --git a/src/storage/task.ts b/src/storage/task.ts index 2d084acb485..3a155735b83 100644 --- a/src/storage/task.ts +++ b/src/storage/task.ts @@ -17,31 +17,37 @@ * @fileoverview Defines types for interacting with blob transfer tasks. 
*/ -import {AuthWrapper} from './implementation/authwrapper'; -import {FbsBlob} from './implementation/blob'; -import {FirebaseStorageError} from './implementation/error'; -import {InternalTaskState} from './implementation/taskenums'; -import {Metadata} from './metadata'; -import {NextFn, ErrorFn, CompleteFn, Unsubscribe, Observer} from './implementation/observer'; -import {Request} from './implementation/request'; +import { AuthWrapper } from './implementation/authwrapper'; +import { FbsBlob } from './implementation/blob'; +import { FirebaseStorageError } from './implementation/error'; +import { InternalTaskState } from './implementation/taskenums'; +import { Metadata } from './metadata'; +import { + NextFn, + ErrorFn, + CompleteFn, + Unsubscribe, + Observer +} from './implementation/observer'; +import { Request } from './implementation/request'; import * as RequestExports from './implementation/request'; -import {Subscribe} from './implementation/observer'; -import {TaskEvent, TaskState} from './implementation/taskenums'; -import {UploadTaskSnapshot} from './tasksnapshot'; +import { Subscribe } from './implementation/observer'; +import { TaskEvent, TaskState } from './implementation/taskenums'; +import { UploadTaskSnapshot } from './tasksnapshot'; import * as fbsArgs from './implementation/args'; -import {ArgSpec} from './implementation/args'; +import { ArgSpec } from './implementation/args'; import * as fbsArray from './implementation/array'; -import {async as fbsAsync} from './implementation/async'; -import {errors as fbsErrors} from './implementation/error'; +import { async as fbsAsync } from './implementation/async'; +import { errors as fbsErrors } from './implementation/error'; import * as errors from './implementation/error'; -import {Location} from './implementation/location'; +import { Location } from './implementation/location'; import * as fbsMetadata from './implementation/metadata'; import * as fbsPromiseimpl from './implementation/promise_external'; 
-import {RequestInfo} from './implementation/requestinfo'; +import { RequestInfo } from './implementation/requestinfo'; import * as fbsRequests from './implementation/requests'; import * as fbsTaskEnums from './implementation/taskenums'; import * as typeUtils from './implementation/type'; -import {Reference} from './reference'; +import { Reference } from './reference'; /** * Represents a blob being uploaded. Can be used to pause/resume/cancel the @@ -52,7 +58,7 @@ export class UploadTask { private authWrapper_: AuthWrapper; private location_: Location; private blob_: FbsBlob; - private metadata_: Metadata|null; + private metadata_: Metadata | null; private mappings_: fbsMetadata.Mappings; private transferred_: number = 0; private needToFetchStatus_: boolean = false; @@ -60,14 +66,13 @@ export class UploadTask { private observers_: Observer[] = []; private resumable_: boolean; private state_: InternalTaskState; - private error_: Error|null = null; - private uploadUrl_: string|null = null; - private request_: Request|null = null; + private error_: Error | null = null; + private uploadUrl_: string | null = null; + private request_: Request | null = null; private chunkMultiplier_: number = 1; private errorHandler_: (p1: FirebaseStorageError) => void; private metadataErrorHandler_: (p1: FirebaseStorageError) => void; - private resolve_: - ((p1: UploadTaskSnapshot) => void) | null = null; + private resolve_: ((p1: UploadTaskSnapshot) => void) | null = null; private reject_: ((p1: Error) => void) | null = null; private promise_: Promise; @@ -77,8 +82,13 @@ export class UploadTask { * @param blob The blob to upload. 
*/ constructor( - ref: Reference, authWrapper: AuthWrapper, location: Location, - mappings: fbsMetadata.Mappings, blob: FbsBlob, metadata: Metadata|null = null) { + ref: Reference, + authWrapper: AuthWrapper, + location: Location, + mappings: fbsMetadata.Mappings, + blob: FbsBlob, + metadata: Metadata | null = null + ) { this.ref_ = ref; this.authWrapper_ = authWrapper; this.location_ = location; @@ -87,7 +97,7 @@ export class UploadTask { this.mappings_ = mappings; this.resumable_ = this.shouldDoResumable_(this.blob_); this.state_ = InternalTaskState.RUNNING; - this.errorHandler_ = (error) => { + this.errorHandler_ = error => { this.request_ = null; this.chunkMultiplier_ = 1; if (error.codeEquals(errors.Code.CANCELED)) { @@ -98,7 +108,7 @@ export class UploadTask { this.transition_(InternalTaskState.ERROR); } }; - this.metadataErrorHandler_ = (error) => { + this.metadataErrorHandler_ = error => { this.request_ = null; if (error.codeEquals(errors.Code.CANCELED)) { this.completeTransitions_(); @@ -118,8 +128,7 @@ export class UploadTask { this.promise_.then(null, () => {}); } - private makeProgressCallback_(): - (p1: number, p2: number) => void { + private makeProgressCallback_(): (p1: number, p2: number) => void { const sizeBefore = this.transferred_; return (loaded, total) => { this.updateProgress_(sizeBefore + loaded); @@ -158,8 +167,8 @@ export class UploadTask { } } - private resolveToken_(callback: (p1: string|null) => void) { - this.authWrapper_.getAuthToken().then((authToken) => { + private resolveToken_(callback: (p1: string | null) => void) { + this.authWrapper_.getAuthToken().then(authToken => { switch (this.state_) { case InternalTaskState.RUNNING: callback(authToken); @@ -178,86 +187,108 @@ export class UploadTask { // TODO(andysoto): assert false private createResumable_() { - this.resolveToken_((authToken) => { + this.resolveToken_(authToken => { const requestInfo = fbsRequests.createResumableUpload( - this.authWrapper_, this.location_, 
this.mappings_, this.blob_, - this.metadata_); - const createRequest = this.authWrapper_.makeRequest(requestInfo, authToken); + this.authWrapper_, + this.location_, + this.mappings_, + this.blob_, + this.metadata_ + ); + const createRequest = this.authWrapper_.makeRequest( + requestInfo, + authToken + ); this.request_ = createRequest; - createRequest.getPromise().then( - (url: string) => { - this.request_ = null; - this.uploadUrl_ = url; - this.needToFetchStatus_ = false; - this.completeTransitions_(); - }, - this.errorHandler_); + createRequest.getPromise().then((url: string) => { + this.request_ = null; + this.uploadUrl_ = url; + this.needToFetchStatus_ = false; + this.completeTransitions_(); + }, this.errorHandler_); }); } private fetchStatus_() { // TODO(andysoto): assert(this.uploadUrl_ !== null); - const url = (this.uploadUrl_ as string); - this.resolveToken_((authToken) => { + const url = this.uploadUrl_ as string; + this.resolveToken_(authToken => { const requestInfo = fbsRequests.getResumableUploadStatus( - this.authWrapper_, this.location_, url, this.blob_); - const statusRequest = this.authWrapper_.makeRequest(requestInfo, authToken); + this.authWrapper_, + this.location_, + url, + this.blob_ + ); + const statusRequest = this.authWrapper_.makeRequest( + requestInfo, + authToken + ); this.request_ = statusRequest; - statusRequest.getPromise().then( - (status) => { - status = (status as fbsRequests.ResumableUploadStatus); - this.request_ = null; - this.updateProgress_(status.current); - this.needToFetchStatus_ = false; - if (status.finalized) { - this.needToFetchMetadata_ = true; - } - this.completeTransitions_(); - }, - this.errorHandler_); + statusRequest.getPromise().then(status => { + status = status as fbsRequests.ResumableUploadStatus; + this.request_ = null; + this.updateProgress_(status.current); + this.needToFetchStatus_ = false; + if (status.finalized) { + this.needToFetchMetadata_ = true; + } + this.completeTransitions_(); + }, 
this.errorHandler_); }); } private continueUpload_() { const chunkSize = - fbsRequests.resumableUploadChunkSize * this.chunkMultiplier_; + fbsRequests.resumableUploadChunkSize * this.chunkMultiplier_; const status = new fbsRequests.ResumableUploadStatus( - this.transferred_, this.blob_.size()); + this.transferred_, + this.blob_.size() + ); // TODO(andysoto): assert(this.uploadUrl_ !== null); - const url = (this.uploadUrl_ as string); - this.resolveToken_((authToken) => { - let requestInfo; + const url = this.uploadUrl_ as string; + this.resolveToken_(authToken => { + let requestInfo; try { requestInfo = fbsRequests.continueResumableUpload( - this.location_, this.authWrapper_, url, this.blob_, chunkSize, - this.mappings_, status, this.makeProgressCallback_()); + this.location_, + this.authWrapper_, + url, + this.blob_, + chunkSize, + this.mappings_, + status, + this.makeProgressCallback_() + ); } catch (e) { this.error_ = e; this.transition_(InternalTaskState.ERROR); return; } - const uploadRequest = this.authWrapper_.makeRequest(requestInfo, authToken); + const uploadRequest = this.authWrapper_.makeRequest( + requestInfo, + authToken + ); this.request_ = uploadRequest; - uploadRequest.getPromise().then( - (newStatus: fbsRequests.ResumableUploadStatus) => { - this.increaseMultiplier_(); - this.request_ = null; - this.updateProgress_(newStatus.current); - if (newStatus.finalized) { - this.metadata_ = newStatus.metadata; - this.transition_(InternalTaskState.SUCCESS); - } else { - this.completeTransitions_(); - } - }, - this.errorHandler_); + uploadRequest + .getPromise() + .then((newStatus: fbsRequests.ResumableUploadStatus) => { + this.increaseMultiplier_(); + this.request_ = null; + this.updateProgress_(newStatus.current); + if (newStatus.finalized) { + this.metadata_ = newStatus.metadata; + this.transition_(InternalTaskState.SUCCESS); + } else { + this.completeTransitions_(); + } + }, this.errorHandler_); }); } private increaseMultiplier_() { const currentSize = - 
fbsRequests.resumableUploadChunkSize * this.chunkMultiplier_; + fbsRequests.resumableUploadChunkSize * this.chunkMultiplier_; // Max chunk size is 32M. if (currentSize < 32 * 1024 * 1024) { @@ -266,36 +297,45 @@ export class UploadTask { } private fetchMetadata_() { - this.resolveToken_((authToken) => { + this.resolveToken_(authToken => { const requestInfo = fbsRequests.getMetadata( - this.authWrapper_, this.location_, this.mappings_); - const metadataRequest = this.authWrapper_.makeRequest(requestInfo, authToken); + this.authWrapper_, + this.location_, + this.mappings_ + ); + const metadataRequest = this.authWrapper_.makeRequest( + requestInfo, + authToken + ); this.request_ = metadataRequest; - metadataRequest.getPromise().then( - (metadata) => { - this.request_ = null; - this.metadata_ = metadata; - this.transition_(InternalTaskState.SUCCESS); - }, - this.metadataErrorHandler_); + metadataRequest.getPromise().then(metadata => { + this.request_ = null; + this.metadata_ = metadata; + this.transition_(InternalTaskState.SUCCESS); + }, this.metadataErrorHandler_); }); } private oneShotUpload_() { - this.resolveToken_((authToken) => { + this.resolveToken_(authToken => { const requestInfo = fbsRequests.multipartUpload( - this.authWrapper_, this.location_, this.mappings_, this.blob_, - this.metadata_); - const multipartRequest = this.authWrapper_.makeRequest(requestInfo, authToken); + this.authWrapper_, + this.location_, + this.mappings_, + this.blob_, + this.metadata_ + ); + const multipartRequest = this.authWrapper_.makeRequest( + requestInfo, + authToken + ); this.request_ = multipartRequest; - multipartRequest.getPromise().then( - (metadata) => { - this.request_ = null; - this.metadata_ = metadata; - this.updateProgress_(this.blob_.size()); - this.transition_(InternalTaskState.SUCCESS); - }, - this.errorHandler_); + multipartRequest.getPromise().then(metadata => { + this.request_ = null; + this.metadata_ = metadata; + this.updateProgress_(this.blob_.size()); + 
this.transition_(InternalTaskState.SUCCESS); + }, this.errorHandler_); }); } @@ -317,7 +357,6 @@ export class UploadTask { } switch (state) { case InternalTaskState.CANCELING: - // TODO(andysoto): // assert(this.state_ === InternalTaskState.RUNNING || // this.state_ === InternalTaskState.PAUSING); @@ -327,7 +366,6 @@ export class UploadTask { } break; case InternalTaskState.PAUSING: - // TODO(andysoto): // assert(this.state_ === InternalTaskState.RUNNING); this.state_ = state; @@ -336,7 +374,6 @@ export class UploadTask { } break; case InternalTaskState.RUNNING: - // TODO(andysoto): // assert(this.state_ === InternalTaskState.PAUSED || // this.state_ === InternalTaskState.PAUSING); @@ -348,14 +385,12 @@ export class UploadTask { } break; case InternalTaskState.PAUSED: - // TODO(andysoto): // assert(this.state_ === InternalTaskState.PAUSING); this.state_ = state; this.notifyObservers_(); break; case InternalTaskState.CANCELED: - // TODO(andysoto): // assert(this.state_ === InternalTaskState.PAUSED || // this.state_ === InternalTaskState.CANCELING); @@ -364,7 +399,6 @@ export class UploadTask { this.notifyObservers_(); break; case InternalTaskState.ERROR: - // TODO(andysoto): // assert(this.state_ === InternalTaskState.RUNNING || // this.state_ === InternalTaskState.PAUSING || @@ -373,7 +407,6 @@ export class UploadTask { this.notifyObservers_(); break; case InternalTaskState.SUCCESS: - // TODO(andysoto): // assert(this.state_ === InternalTaskState.RUNNING || // this.state_ === InternalTaskState.PAUSING || @@ -396,34 +429,43 @@ export class UploadTask { this.start_(); break; default: - // TODO(andysoto): assert(false); break; } } get snapshot(): UploadTaskSnapshot { - const externalState = - fbsTaskEnums.taskStateFromInternalTaskState(this.state_); + const externalState = fbsTaskEnums.taskStateFromInternalTaskState( + this.state_ + ); return new UploadTaskSnapshot( - this.transferred_, this.blob_.size(), externalState, this.metadata_, - this, this.ref_); + 
this.transferred_, + this.blob_.size(), + externalState, + this.metadata_, + this, + this.ref_ + ); } /** * Adds a callback for an event. * @param type The type of event to listen for. */ - on(type: TaskEvent, nextOrObserver = undefined, error = undefined, - completed = undefined): Unsubscribe | Subscribe { + on( + type: TaskEvent, + nextOrObserver = undefined, + error = undefined, + completed = undefined + ): Unsubscribe | Subscribe { function typeValidator(_p: any) { if (type !== TaskEvent.STATE_CHANGED) { throw `Expected one of the event types: [${TaskEvent.STATE_CHANGED}].`; } } const nextOrObserverMessage = - 'Expected a function or an Object with one of ' + - '`next`, `error`, `complete` properties.'; + 'Expected a function or an Object with one of ' + + '`next`, `error`, `complete` properties.'; const nextValidator = fbsArgs.nullFunctionSpec(true).validator; const observerValidator = fbsArgs.looseObjectSpec(null, true).validator; @@ -431,12 +473,13 @@ export class UploadTask { try { nextValidator(p); return; - } catch (e) { - } + } catch (e) {} try { observerValidator(p); - const anyDefined = typeUtils.isJustDef(p['next']) || typeUtils.isJustDef(p['error']) || - typeUtils.isJustDef(p['complete']); + const anyDefined = + typeUtils.isJustDef(p['next']) || + typeUtils.isJustDef(p['error']) || + typeUtils.isJustDef(p['complete']); if (!anyDefined) { throw ''; } @@ -448,16 +491,23 @@ export class UploadTask { const specs = [ fbsArgs.stringSpec(typeValidator), fbsArgs.looseObjectSpec(nextOrObserverValidator, true), - fbsArgs.nullFunctionSpec(true), fbsArgs.nullFunctionSpec(true) + fbsArgs.nullFunctionSpec(true), + fbsArgs.nullFunctionSpec(true) ]; fbsArgs.validate('on', specs, arguments); const self = this; - function makeBinder(specs: ArgSpec[]|null): Subscribe { + function makeBinder( + specs: ArgSpec[] | null + ): Subscribe { function binder( - nextOrObserver: NextFn | {[name: string]: string|null} | null, - error?: ErrorFn | null, - opt_complete?: CompleteFn | 
null) { + nextOrObserver: + | NextFn + | { [name: string]: string | null } + | null, + error?: ErrorFn | null, + opt_complete?: CompleteFn | null + ) { if (specs !== null) { fbsArgs.validate('on', specs, arguments); } @@ -478,11 +528,14 @@ export class UploadTask { } const binderSpecs = [ fbsArgs.looseObjectSpec(binderNextOrObserverValidator), - fbsArgs.nullFunctionSpec(true), fbsArgs.nullFunctionSpec(true) + fbsArgs.nullFunctionSpec(true), + fbsArgs.nullFunctionSpec(true) ]; - const typeOnly = - !(typeUtils.isJustDef(nextOrObserver) || typeUtils.isJustDef(error) || - typeUtils.isJustDef(completed)); + const typeOnly = !( + typeUtils.isJustDef(nextOrObserver) || + typeUtils.isJustDef(error) || + typeUtils.isJustDef(completed) + ); if (typeOnly) { return makeBinder(binderSpecs); } else { @@ -496,12 +549,16 @@ export class UploadTask { * @param onFulfilled The fulfillment callback. Promise chaining works as normal. * @param onRejected The rejection callback. */ - then(onFulfilled?: ((value: UploadTaskSnapshot) => U | PromiseLike) | null, onRejected?: ((error: any) => U | PromiseLike) | null): Promise { - // These casts are needed so that TypeScript can infer the types of the - // resulting Promise. - return this.promise_.then( - (onFulfilled as (value: UploadTaskSnapshot) => U | PromiseLike), - (onRejected as ((error: any) => PromiseLike) | null)); + then( + onFulfilled?: ((value: UploadTaskSnapshot) => U | PromiseLike) | null, + onRejected?: ((error: any) => U | PromiseLike) | null + ): Promise { + // These casts are needed so that TypeScript can infer the types of the + // resulting Promise. 
+ return this.promise_.then( + onFulfilled as (value: UploadTaskSnapshot) => U | PromiseLike, + onRejected as ((error: any) => PromiseLike) | null + ); } /** @@ -529,7 +586,7 @@ export class UploadTask { private notifyObservers_() { this.finishPromise_(); const observers = fbsArray.clone(this.observers_); - observers.forEach((observer) => { + observers.forEach(observer => { this.notifyObserver_(observer); }); } @@ -543,8 +600,8 @@ export class UploadTask { break; case TaskState.CANCELED: case TaskState.ERROR: - const toCall = (this.reject_ as ((p1: Error) => void)); - fbsAsync(toCall.bind(null, (this.error_ as Error)))(); + const toCall = this.reject_ as ((p1: Error) => void); + fbsAsync(toCall.bind(null, this.error_ as Error))(); break; default: triggered = false; @@ -558,8 +615,9 @@ export class UploadTask { } private notifyObserver_(observer: Observer) { - const externalState = - fbsTaskEnums.taskStateFromInternalTaskState(this.state_); + const externalState = fbsTaskEnums.taskStateFromInternalTaskState( + this.state_ + ); switch (externalState) { case TaskState.RUNNING: case TaskState.PAUSED: @@ -575,14 +633,13 @@ export class UploadTask { case TaskState.CANCELED: case TaskState.ERROR: if (observer.error !== null) { - fbsAsync(observer.error.bind(observer, (this.error_ as Error)))(); + fbsAsync(observer.error.bind(observer, this.error_ as Error))(); } break; default: - // TODO(andysoto): assert(false); if (observer.error !== null) { - fbsAsync(observer.error.bind(observer, (this.error_ as Error)))(); + fbsAsync(observer.error.bind(observer, this.error_ as Error))(); } } } @@ -593,8 +650,9 @@ export class UploadTask { */ resume(): boolean { fbsArgs.validate('resume', [], arguments); - const valid = this.state_ === InternalTaskState.PAUSED || - this.state_ === InternalTaskState.PAUSING; + const valid = + this.state_ === InternalTaskState.PAUSED || + this.state_ === InternalTaskState.PAUSING; if (valid) { this.transition_(InternalTaskState.RUNNING); } @@ -621,8 
+679,9 @@ export class UploadTask { */ cancel(): boolean { fbsArgs.validate('cancel', [], arguments); - const valid = this.state_ === InternalTaskState.RUNNING || - this.state_ === InternalTaskState.PAUSING; + const valid = + this.state_ === InternalTaskState.RUNNING || + this.state_ === InternalTaskState.PAUSING; if (valid) { this.transition_(InternalTaskState.CANCELING); } diff --git a/src/storage/tasksnapshot.ts b/src/storage/tasksnapshot.ts index 9cff1b932e2..7225f04c1e3 100644 --- a/src/storage/tasksnapshot.ts +++ b/src/storage/tasksnapshot.ts @@ -13,18 +13,23 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import {TaskState} from './implementation/taskenums'; +import { TaskState } from './implementation/taskenums'; import * as type from './implementation/type'; -import {Metadata} from './metadata'; -import {Reference} from './reference'; -import {UploadTask} from './task'; +import { Metadata } from './metadata'; +import { Reference } from './reference'; +import { UploadTask } from './task'; export class UploadTaskSnapshot { - constructor(readonly bytesTransferred: number, readonly totalBytes: number, - readonly state: TaskState, readonly metadata: Metadata|null, - readonly task: UploadTask, readonly ref: Reference) {} + constructor( + readonly bytesTransferred: number, + readonly totalBytes: number, + readonly state: TaskState, + readonly metadata: Metadata | null, + readonly task: UploadTask, + readonly ref: Reference + ) {} - get downloadURL(): string|null { + get downloadURL(): string | null { if (this.metadata !== null) { let urls = this.metadata['downloadURLs']; if (urls != null && urls[0] != null) { diff --git a/src/utils/Sha1.ts b/src/utils/Sha1.ts index 1c543b33719..ce56a51a5da 100644 --- a/src/utils/Sha1.ts +++ b/src/utils/Sha1.ts @@ -31,7 +31,7 @@ import { Hash } from './hash'; * Firefox 16: ~250 Mbit/s * */ - + /** * SHA-1 cryptographic hash constructor. 
* @@ -49,7 +49,7 @@ export class Sha1 extends Hash { * @private */ private chain_: Array = []; - + /** * A buffer holding the partially computed hash result. * @type {!Array} @@ -84,29 +84,28 @@ export class Sha1 extends Hash { constructor() { super(); - + this.blockSize = 512 / 8; - + this.pad_[0] = 128; for (var i = 1; i < this.blockSize; ++i) { this.pad_[i] = 0; } - + this.reset(); } - + reset() { this.chain_[0] = 0x67452301; this.chain_[1] = 0xefcdab89; this.chain_[2] = 0x98badcfe; this.chain_[3] = 0x10325476; this.chain_[4] = 0xc3d2e1f0; - + this.inbuf_ = 0; this.total_ = 0; } - - + /** * Internal compress helper function. * @param {!Array|!Uint8Array|string} buf Block to compress. @@ -117,9 +116,9 @@ export class Sha1 extends Hash { if (!opt_offset) { opt_offset = 0; } - + var W = this.W_; - + // get 16 big endian words if (typeof buf === 'string') { for (var i = 0; i < 16; i++) { @@ -131,35 +130,37 @@ export class Sha1 extends Hash { // this change once the Safari bug // (https://bugs.webkit.org/show_bug.cgi?id=109036) has been fixed and // most clients have been updated. 
- W[i] = (buf.charCodeAt(opt_offset) << 24) | - (buf.charCodeAt(opt_offset + 1) << 16) | - (buf.charCodeAt(opt_offset + 2) << 8) | - (buf.charCodeAt(opt_offset + 3)); + W[i] = + (buf.charCodeAt(opt_offset) << 24) | + (buf.charCodeAt(opt_offset + 1) << 16) | + (buf.charCodeAt(opt_offset + 2) << 8) | + buf.charCodeAt(opt_offset + 3); opt_offset += 4; } } else { for (var i = 0; i < 16; i++) { - W[i] = (buf[opt_offset] << 24) | - (buf[opt_offset + 1] << 16) | - (buf[opt_offset + 2] << 8) | - (buf[opt_offset + 3]); + W[i] = + (buf[opt_offset] << 24) | + (buf[opt_offset + 1] << 16) | + (buf[opt_offset + 2] << 8) | + buf[opt_offset + 3]; opt_offset += 4; } } - + // expand to 80 words for (var i = 16; i < 80; i++) { var t = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]; W[i] = ((t << 1) | (t >>> 31)) & 0xffffffff; } - + var a = this.chain_[0]; var b = this.chain_[1]; var c = this.chain_[2]; var d = this.chain_[3]; var e = this.chain_[4]; var f, k; - + // TODO(user): Try to unroll this loop to speed up the computation. for (var i = 0; i < 80; i++) { if (i < 40) { @@ -179,7 +180,7 @@ export class Sha1 extends Hash { k = 0xca62c1d6; } } - + var t = (((a << 5) | (a >>> 27)) + f + e + k + W[i]) & 0xffffffff; e = d; d = c; @@ -187,30 +188,30 @@ export class Sha1 extends Hash { b = a; a = t; } - + this.chain_[0] = (this.chain_[0] + a) & 0xffffffff; this.chain_[1] = (this.chain_[1] + b) & 0xffffffff; this.chain_[2] = (this.chain_[2] + c) & 0xffffffff; this.chain_[3] = (this.chain_[3] + d) & 0xffffffff; this.chain_[4] = (this.chain_[4] + e) & 0xffffffff; } - + update(bytes, opt_length?) { // TODO(johnlenz): tighten the function signature and remove this check if (bytes == null) { return; } - + if (opt_length === undefined) { opt_length = bytes.length; } - + var lengthMinusBlock = opt_length - this.blockSize; var n = 0; // Using local instead of member variables gives ~5% speedup on Firefox 16. 
var buf = this.buf_; var inbuf = this.inbuf_; - + // The outer while loop should execute at most twice. while (n < opt_length) { // When we have no data in the block to top up, we can directly process the @@ -223,7 +224,7 @@ export class Sha1 extends Hash { n += this.blockSize; } } - + if (typeof bytes === 'string') { while (n < opt_length) { buf[inbuf] = bytes.charCodeAt(n); @@ -250,32 +251,31 @@ export class Sha1 extends Hash { } } } - + this.inbuf_ = inbuf; this.total_ += opt_length; } - - + /** @override */ digest() { var digest = []; var totalBits = this.total_ * 8; - + // Add pad 0x80 0x00*. if (this.inbuf_ < 56) { this.update(this.pad_, 56 - this.inbuf_); } else { this.update(this.pad_, this.blockSize - (this.inbuf_ - 56)); } - + // Add # bits. for (var i = this.blockSize - 1; i >= 56; i--) { this.buf_[i] = totalBits & 255; totalBits /= 256; // Don't use bit-shifting here! } - + this.compress_(this.buf_); - + var n = 0; for (var i = 0; i < 5; i++) { for (var j = 24; j >= 0; j -= 8) { @@ -285,4 +285,4 @@ export class Sha1 extends Hash { } return digest; } -} \ No newline at end of file +} diff --git a/src/utils/assert.ts b/src/utils/assert.ts index c77147f663b..047ace953f4 100644 --- a/src/utils/assert.ts +++ b/src/utils/assert.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { CONSTANTS } from "./constants"; +import { CONSTANTS } from './constants'; /** * Throws an error if the provided assertion is falsy @@ -33,5 +33,10 @@ export const assert = function(assertion, message) { * @return {!Error} */ export const assertionError = function(message) { - return new Error('Firebase Database (' + CONSTANTS.SDK_VERSION + ') INTERNAL ASSERT FAILED: ' + message); + return new Error( + 'Firebase Database (' + + CONSTANTS.SDK_VERSION + + ') INTERNAL ASSERT FAILED: ' + + message + ); }; diff --git a/src/utils/constants.ts b/src/utils/constants.ts index 2c3b7f2eaeb..0209f1b2f77 100644 --- a/src/utils/constants.ts +++ b/src/utils/constants.ts @@ -32,4 +32,4 @@ export const CONSTANTS = { * Firebase SDK Version */ SDK_VERSION: '${JSCORE_VERSION}' -} \ No newline at end of file +}; diff --git a/src/utils/crypt.ts b/src/utils/crypt.ts index a0b9ad05bd4..b3437d488e0 100644 --- a/src/utils/crypt.ts +++ b/src/utils/crypt.ts @@ -17,8 +17,9 @@ import { globalScope } from './globalScope'; const stringToByteArray = function(str) { - var output = [], p = 0; - for (var i = 0;i < str.length;i++) { + var output = [], + p = 0; + for (var i = 0; i < str.length; i++) { var c = str.charCodeAt(i); while (c > 255) { output[p++] = c & 255; @@ -63,7 +64,7 @@ export const base64 = { * @private */ byteToCharMap_: null, - + /** * Maps characters to bytes. * @type {Object} @@ -77,25 +78,21 @@ export const base64 = { * @private */ byteToCharMapWebSafe_: null, - - + /** * Maps websafe characters to bytes. * @type {Object} * @private */ charToByteMapWebSafe_: null, - - + /** * Our default alphabet, shared between * ENCODED_VALS and ENCODED_VALS_WEBSAFE * @type {string} */ ENCODED_VALS_BASE: - 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + - 'abcdefghijklmnopqrstuvwxyz' + - '0123456789', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + 'abcdefghijklmnopqrstuvwxyz' + '0123456789', /** * Our default alphabet. Value 64 (=) is special; it means "nothing." 
@@ -104,15 +101,15 @@ export const base64 = { get ENCODED_VALS() { return this.ENCODED_VALS_BASE + '+/='; }, - + /** * Our websafe alphabet. * @type {string} */ get ENCODED_VALS_WEBSAFE() { - return this.ENCODED_VALS_BASE + '-_.' + return this.ENCODED_VALS_BASE + '-_.'; }, - + /** * Whether this browser supports the atob and btoa functions. This extension * started at Mozilla but is now implemented by many browsers. We use the @@ -122,7 +119,7 @@ export const base64 = { * @type {boolean} */ HAS_NATIVE_SUPPORT: typeof globalScope.atob === 'function', - + /** * Base64-encode an array of bytes. * @@ -136,45 +133,46 @@ export const base64 = { if (!Array.isArray(input)) { throw Error('encodeByteArray takes an array as a parameter'); } - + this.init_(); - - var byteToCharMap = opt_webSafe ? - this.byteToCharMapWebSafe_ : - this.byteToCharMap_; - + + var byteToCharMap = opt_webSafe + ? this.byteToCharMapWebSafe_ + : this.byteToCharMap_; + var output = []; - + for (var i = 0; i < input.length; i += 3) { var byte1 = input[i]; var haveByte2 = i + 1 < input.length; var byte2 = haveByte2 ? input[i + 1] : 0; var haveByte3 = i + 2 < input.length; var byte3 = haveByte3 ? input[i + 2] : 0; - + var outByte1 = byte1 >> 2; var outByte2 = ((byte1 & 0x03) << 4) | (byte2 >> 4); - var outByte3 = ((byte2 & 0x0F) << 2) | (byte3 >> 6); - var outByte4 = byte3 & 0x3F; - + var outByte3 = ((byte2 & 0x0f) << 2) | (byte3 >> 6); + var outByte4 = byte3 & 0x3f; + if (!haveByte3) { outByte4 = 64; - + if (!haveByte2) { outByte3 = 64; } } - - output.push(byteToCharMap[outByte1], - byteToCharMap[outByte2], - byteToCharMap[outByte3], - byteToCharMap[outByte4]); + + output.push( + byteToCharMap[outByte1], + byteToCharMap[outByte2], + byteToCharMap[outByte3], + byteToCharMap[outByte4] + ); } - + return output.join(''); }, - - + /** * Base64-encode a string. 
* @@ -189,11 +187,9 @@ export const base64 = { if (this.HAS_NATIVE_SUPPORT && !opt_webSafe) { return btoa(input); } - return this.encodeByteArray( - stringToByteArray(input), opt_webSafe); + return this.encodeByteArray(stringToByteArray(input), opt_webSafe); }, - - + /** * Base64-decode a string. * @@ -210,8 +206,7 @@ export const base64 = { } return byteArrayToString(this.decodeStringToByteArray(input, opt_webSafe)); }, - - + /** * Base64-decode a string. * @@ -229,51 +224,49 @@ export const base64 = { */ decodeStringToByteArray(input, opt_webSafe) { this.init_(); - - var charToByteMap = opt_webSafe ? - this.charToByteMapWebSafe_ : - this.charToByteMap_; - + + var charToByteMap = opt_webSafe + ? this.charToByteMapWebSafe_ + : this.charToByteMap_; + var output = []; - + for (var i = 0; i < input.length; ) { var byte1 = charToByteMap[input.charAt(i++)]; - + var haveByte2 = i < input.length; var byte2 = haveByte2 ? charToByteMap[input.charAt(i)] : 0; ++i; - + var haveByte3 = i < input.length; var byte3 = haveByte3 ? charToByteMap[input.charAt(i)] : 64; ++i; - + var haveByte4 = i < input.length; var byte4 = haveByte4 ? charToByteMap[input.charAt(i)] : 64; ++i; - - if (byte1 == null || byte2 == null || - byte3 == null || byte4 == null) { + + if (byte1 == null || byte2 == null || byte3 == null || byte4 == null) { throw Error(); } - + var outByte1 = (byte1 << 2) | (byte2 >> 4); output.push(outByte1); - + if (byte3 != 64) { - var outByte2 = ((byte2 << 4) & 0xF0) | (byte3 >> 2); + var outByte2 = ((byte2 << 4) & 0xf0) | (byte3 >> 2); output.push(outByte2); - + if (byte4 != 64) { - var outByte3 = ((byte3 << 6) & 0xC0) | byte4; + var outByte3 = ((byte3 << 6) & 0xc0) | byte4; output.push(outByte3); } } } - + return output; }, - - + /** * Lazy static initialization function. Called before * accessing any of the static map variables. 
@@ -285,30 +278,20 @@ export const base64 = { this.charToByteMap_ = {}; this.byteToCharMapWebSafe_ = {}; this.charToByteMapWebSafe_ = {}; - + // We want quick mappings back and forth, so we precompute two maps. for (var i = 0; i < this.ENCODED_VALS.length; i++) { - this.byteToCharMap_[i] = - this.ENCODED_VALS.charAt(i); + this.byteToCharMap_[i] = this.ENCODED_VALS.charAt(i); this.charToByteMap_[this.byteToCharMap_[i]] = i; - this.byteToCharMapWebSafe_[i] = - this.ENCODED_VALS_WEBSAFE.charAt(i); - this.charToByteMapWebSafe_[ - this.byteToCharMapWebSafe_[i]] = i; - + this.byteToCharMapWebSafe_[i] = this.ENCODED_VALS_WEBSAFE.charAt(i); + this.charToByteMapWebSafe_[this.byteToCharMapWebSafe_[i]] = i; + // Be forgiving when decoding and correctly decode both encodings. if (i >= this.ENCODED_VALS_BASE.length) { - this.charToByteMap_[ - this.ENCODED_VALS_WEBSAFE.charAt(i)] = i; - this.charToByteMapWebSafe_[ - this.ENCODED_VALS.charAt(i)] = i; + this.charToByteMap_[this.ENCODED_VALS_WEBSAFE.charAt(i)] = i; + this.charToByteMapWebSafe_[this.ENCODED_VALS.charAt(i)] = i; } } } } }; - - - - - \ No newline at end of file diff --git a/src/utils/deep_copy.ts b/src/utils/deep_copy.ts index b20a155b3d2..d78c1c56768 100644 --- a/src/utils/deep_copy.ts +++ b/src/utils/deep_copy.ts @@ -39,26 +39,26 @@ export function deepExtend(target: any, source: any): any { } switch (source.constructor) { - case Date: - // Treat Dates like scalars; if the target date object had any child - // properties - they will be lost! - let dateValue = (source as any) as Date; - return new Date(dateValue.getTime()); + case Date: + // Treat Dates like scalars; if the target date object had any child + // properties - they will be lost! 
+ let dateValue = (source as any) as Date; + return new Date(dateValue.getTime()); - case Object: - if (target === undefined) { - target = {}; - } - break; + case Object: + if (target === undefined) { + target = {}; + } + break; - case Array: - // Always copy the array source and overwrite the target. - target = []; - break; + case Array: + // Always copy the array source and overwrite the target. + target = []; + break; - default: - // Not a plain Object - treat it as a scalar. - return source; + default: + // Not a plain Object - treat it as a scalar. + return source; } for (let prop in source) { @@ -74,4 +74,4 @@ export function deepExtend(target: any, source: any): any { // TODO: Really needed (for JSCompiler type checking)? export function patchProperty(obj: any, prop: string, value: any) { obj[prop] = value; -} \ No newline at end of file +} diff --git a/src/utils/environment.ts b/src/utils/environment.ts index 434263ef1cc..3afca352d44 100644 --- a/src/utils/environment.ts +++ b/src/utils/environment.ts @@ -14,15 +14,17 @@ * limitations under the License. */ -import { CONSTANTS } from "./constants"; +import { CONSTANTS } from './constants'; /** * Returns navigator.userAgent string or '' if it's not defined. 
* @return {string} user agent string */ export const getUA = function() { - if (typeof navigator !== 'undefined' && - typeof navigator['userAgent'] === 'string') { + if ( + typeof navigator !== 'undefined' && + typeof navigator['userAgent'] === 'string' + ) { return navigator['userAgent']; } else { return ''; @@ -38,22 +40,24 @@ export const getUA = function() { * @return {boolean} isMobileCordova */ export const isMobileCordova = function() { - return typeof window !== 'undefined' && - !!(window['cordova'] || window['phonegap'] || window['PhoneGap']) && - /ios|iphone|ipod|ipad|android|blackberry|iemobile/i.test(getUA()); + return ( + typeof window !== 'undefined' && + !!(window['cordova'] || window['phonegap'] || window['PhoneGap']) && + /ios|iphone|ipod|ipad|android|blackberry|iemobile/i.test(getUA()) + ); }; - /** * Detect React Native. * * @return {boolean} True if ReactNative environment is detected. */ export const isReactNative = function() { - return typeof navigator === 'object' && navigator['product'] === 'ReactNative'; + return ( + typeof navigator === 'object' && navigator['product'] === 'ReactNative' + ); }; - /** * Detect Node.js. 
* diff --git a/src/utils/globalScope.ts b/src/utils/globalScope.ts index 8c64492b72a..215860d4e70 100644 --- a/src/utils/globalScope.ts +++ b/src/utils/globalScope.ts @@ -17,15 +17,17 @@ let scope; if (typeof global !== 'undefined') { - scope = global; + scope = global; } else if (typeof self !== 'undefined') { - scope = self; + scope = self; } else { - try { - scope = Function('return this')(); - } catch (e) { - throw new Error('polyfill failed because global object is unavailable in this environment'); - } + try { + scope = Function('return this')(); + } catch (e) { + throw new Error( + 'polyfill failed because global object is unavailable in this environment' + ); + } } -export const globalScope = scope; \ No newline at end of file +export const globalScope = scope; diff --git a/src/utils/hash.ts b/src/utils/hash.ts index 8097e7d9d1c..b9282acd827 100644 --- a/src/utils/hash.ts +++ b/src/utils/hash.ts @@ -27,14 +27,14 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. - + /** * @fileoverview Abstract cryptographic hash interface. * * See Sha1 and Md5 for sample implementations. * */ - + /** * Create a cryptographic hash instance. * @@ -47,6 +47,6 @@ export class Hash { * @type {number} */ blockSize: number = -1; - + constructor() {} -} \ No newline at end of file +} diff --git a/src/utils/json.ts b/src/utils/json.ts index 879411ee848..5337784245f 100644 --- a/src/utils/json.ts +++ b/src/utils/json.ts @@ -24,7 +24,6 @@ export const jsonEval = function(str) { return JSON.parse(str); }; - /** * Returns JSON representing a javascript object. * @param {*} data Javascript object to be stringified. diff --git a/src/utils/jwt.ts b/src/utils/jwt.ts index 22a6a2cfe10..619ca92bdec 100644 --- a/src/utils/jwt.ts +++ b/src/utils/jwt.ts @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -import { base64Decode } from "../database/core/util/util"; -import { jsonEval } from "./json"; +import { base64Decode } from '../database/core/util/util'; +import { jsonEval } from './json'; /** * Decodes a Firebase auth. token into constituent parts. @@ -29,9 +29,9 @@ import { jsonEval } from "./json"; */ export const decode = function(token) { var header = {}, - claims = {}, - data = {}, - signature = ''; + claims = {}, + data = {}, + signature = ''; try { var parts = token.split('.'); @@ -63,8 +63,9 @@ export const decode = function(token) { */ export const isValidTimestamp = function(token) { var claims = decode(token).claims, - now = Math.floor(new Date().getTime() / 1000), - validSince, validUntil; + now = Math.floor(new Date().getTime() / 1000), + validSince, + validUntil; if (typeof claims === 'object') { if (claims.hasOwnProperty('nbf')) { @@ -81,8 +82,9 @@ export const isValidTimestamp = function(token) { } } - return now && validSince && validUntil && - (now >= validSince) && (now <= validUntil); + return ( + now && validSince && validUntil && now >= validSince && now <= validUntil + ); }; /** @@ -116,12 +118,14 @@ export const issuedAtTime = function(token) { */ export const isValidFormat = function(token) { var decoded = decode(token), - claims = decoded.claims; + claims = decoded.claims; - return !!decoded.signature && + return ( + !!decoded.signature && !!claims && - (typeof claims === 'object') && - claims.hasOwnProperty('iat'); + typeof claims === 'object' && + claims.hasOwnProperty('iat') + ); }; /** @@ -136,5 +140,5 @@ export const isValidFormat = function(token) { */ export const isAdmin = function(token) { var claims = decode(token).claims; - return (typeof claims === 'object' && claims['admin'] === true); + return typeof claims === 'object' && claims['admin'] === true; }; diff --git a/src/utils/nodePatches.ts b/src/utils/nodePatches.ts index 9ae92982625..e700ff64784 100644 --- a/src/utils/nodePatches.ts +++ b/src/utils/nodePatches.ts @@ 
-14,15 +14,15 @@ * limitations under the License. */ -import { CONSTANTS } from "./constants"; -import { setWebSocketImpl } from "../database/realtime/WebSocketConnection"; -import { setBufferImpl } from "../database/core/util/util"; +import { CONSTANTS } from './constants'; +import { setWebSocketImpl } from '../database/realtime/WebSocketConnection'; +import { setBufferImpl } from '../database/core/util/util'; import { FirebaseIFrameScriptHolder, FIREBASE_LONGPOLL_COMMAND_CB_NAME, FIREBASE_LONGPOLL_DATA_CB_NAME -} from "../database/realtime/BrowserPollConnection"; -import { Client } from "faye-websocket"; +} from '../database/realtime/BrowserPollConnection'; +import { Client } from 'faye-websocket'; setBufferImpl(Buffer); setWebSocketImpl(Client); @@ -35,7 +35,12 @@ CONSTANTS.NODE_CLIENT = true; */ (function() { var version = process['version']; - if (version !== 'v0.10.22' && version !== 'v0.10.23' && version !== 'v0.10.24') return; + if ( + version !== 'v0.10.22' && + version !== 'v0.10.23' && + version !== 'v0.10.24' + ) + return; /** * The following duplicates much of `/lib/_stream_writable.js` at * b922b5e90d2c14dd332b95827c2533e083df7e55, applying the fix for @@ -54,16 +59,12 @@ CONSTANTS.NODE_CLIENT = true; encoding = null; } - if (Buffer['isBuffer'](chunk)) - encoding = 'buffer'; - else if (!encoding) - encoding = state['defaultEncoding']; + if (Buffer['isBuffer'](chunk)) encoding = 'buffer'; + else if (!encoding) encoding = state['defaultEncoding']; - if (typeof cb !== 'function') - cb = function() {}; + if (typeof cb !== 'function') cb = function() {}; - if (state['ended']) - writeAfterEnd(this, state, cb); + if (state['ended']) writeAfterEnd(this, state, cb); else if (validChunk(this, state, chunk, cb)) ret = writeOrBuffer(this, state, chunk, encoding, cb); @@ -81,11 +82,13 @@ CONSTANTS.NODE_CLIENT = true; function validChunk(stream, state, chunk, cb) { var valid = true; - if (!Buffer['isBuffer'](chunk) && - 'string' !== typeof chunk && - chunk !== null 
&& - chunk !== undefined && - !state['objectMode']) { + if ( + !Buffer['isBuffer'](chunk) && + 'string' !== typeof chunk && + chunk !== null && + chunk !== undefined && + !state['objectMode'] + ) { var er = new TypeError('Invalid non-string/buffer chunk'); stream['emit']('error', er); process['nextTick'](function() { @@ -98,29 +101,28 @@ CONSTANTS.NODE_CLIENT = true; function writeOrBuffer(stream, state, chunk, encoding, cb) { chunk = decodeChunk(state, chunk, encoding); - if (Buffer['isBuffer'](chunk)) - encoding = 'buffer'; + if (Buffer['isBuffer'](chunk)) encoding = 'buffer'; var len = state['objectMode'] ? 1 : chunk['length']; state['length'] += len; var ret = state['length'] < state['highWaterMark']; // we must ensure that previous needDrain will not be reset to false. - if (!ret) - state['needDrain'] = true; + if (!ret) state['needDrain'] = true; if (state['writing']) state['buffer']['push'](new WriteReq(chunk, encoding, cb)); - else - doWrite(stream, state, len, chunk, encoding, cb); + else doWrite(stream, state, len, chunk, encoding, cb); return ret; } function decodeChunk(state, chunk, encoding) { - if (!state['objectMode'] && - state['decodeStrings'] !== false && - typeof chunk === 'string') { + if ( + !state['objectMode'] && + state['decodeStrings'] !== false && + typeof chunk === 'string' + ) { chunk = new Buffer(chunk, encoding); } return chunk; @@ -157,17 +159,21 @@ CONSTANTS.NODE_CLIENT = true; * @param {{url: string, forever: boolean}} req * @param {function(string)=} onComplete */ -(FirebaseIFrameScriptHolder as any).nodeRestRequest = function(req, onComplete) { +(FirebaseIFrameScriptHolder as any).nodeRestRequest = function( + req, + onComplete +) { if (!(FirebaseIFrameScriptHolder as any).request) - (FirebaseIFrameScriptHolder as any).request = - /** @type {function({url: string, forever: boolean}, function(Error, number, string))} */ (require('request')); + (FirebaseIFrameScriptHolder as any).request = /** @type {function({url: string, forever: 
boolean}, function(Error, number, string))} */ require('request'); - (FirebaseIFrameScriptHolder as any).request(req, function(error, response, body) { - if (error) - throw 'Rest request for ' + req.url + ' failed.'; + (FirebaseIFrameScriptHolder as any).request(req, function( + error, + response, + body + ) { + if (error) throw 'Rest request for ' + req.url + ' failed.'; - if (onComplete) - onComplete(body); + if (onComplete) onComplete(body); }); }; @@ -175,12 +181,18 @@ CONSTANTS.NODE_CLIENT = true; * @param {!string} url * @param {function()} loadCB */ -(FirebaseIFrameScriptHolder.prototype).doNodeLongPoll = function(url, loadCB) { +(FirebaseIFrameScriptHolder.prototype).doNodeLongPoll = function( + url, + loadCB +) { var self = this; - (FirebaseIFrameScriptHolder as any).nodeRestRequest({ url: url, forever: true }, function(body) { - self.evalBody(body); - loadCB(); - }); + (FirebaseIFrameScriptHolder as any).nodeRestRequest( + { url: url, forever: true }, + function(body) { + self.evalBody(body); + loadCB(); + } + ); }; /** @@ -190,9 +202,14 @@ CONSTANTS.NODE_CLIENT = true; (FirebaseIFrameScriptHolder.prototype).evalBody = function(body) { var jsonpCB; //jsonpCB is externed in firebase-extern.js - eval('jsonpCB = function(' + FIREBASE_LONGPOLL_COMMAND_CB_NAME + ', ' + FIREBASE_LONGPOLL_DATA_CB_NAME + ') {' + - body + - '}'); + eval( + 'jsonpCB = function(' + + FIREBASE_LONGPOLL_COMMAND_CB_NAME + + ', ' + + FIREBASE_LONGPOLL_DATA_CB_NAME + + ') {' + + body + + '}' + ); jsonpCB(this.commandCB, this.onMessageCB); }; - diff --git a/src/utils/obj.ts b/src/utils/obj.ts index 9784ff5caeb..09f7e2115ef 100644 --- a/src/utils/obj.ts +++ b/src/utils/obj.ts @@ -21,8 +21,7 @@ export const contains = function(obj, key) { }; export const safeGet = function(obj, key) { - if (Object.prototype.hasOwnProperty.call(obj, key)) - return obj[key]; + if (Object.prototype.hasOwnProperty.call(obj, key)) return obj[key]; // else return undefined. 
}; @@ -52,8 +51,7 @@ export const extend = function(objTo, objFrom) { objTo[key] = value; }); return objTo; -} - +}; /** * Returns a clone of the specified object. @@ -64,7 +62,6 @@ export const clone = function(obj) { return extend({}, obj); }; - /** * Returns true if obj has typeof "object" and is not null. Unlike goog.isObject(), does not return true * for functions. @@ -81,7 +78,7 @@ export const isEmpty = function(obj) { return false; } return true; -} +}; export const getCount = function(obj) { var rv = 0; @@ -89,7 +86,7 @@ export const getCount = function(obj) { rv++; } return rv; -} +}; export const map = function(obj, f, opt_obj?) { var res = {}; @@ -136,7 +133,10 @@ export const getValues = function(obj) { * @param {!function(K, V)} fn Function to call for each key and value. * @template K,V */ -export const every = function(obj: Object, fn: (k: string, v?: V) => boolean): boolean { +export const every = function( + obj: Object, + fn: (k: string, v?: V) => boolean +): boolean { for (let key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { if (!fn(key, obj[key])) { diff --git a/src/utils/promise.ts b/src/utils/promise.ts index 879f98f5e4e..796b3155295 100644 --- a/src/utils/promise.ts +++ b/src/utils/promise.ts @@ -25,7 +25,7 @@ export class Deferred { resolve; reject; promise; - + /** @constructor */ constructor() { var self = this; @@ -70,8 +70,7 @@ export class Deferred { } return meta; } -}; - +} /** * Chrome (and maybe other browsers) report an Error in the console if you reject a promise @@ -86,4 +85,4 @@ export class Deferred { */ export const attachDummyErrorHandler = function(promise) { promise.catch(() => {}); -}; \ No newline at end of file +}; diff --git a/src/utils/utf8.ts b/src/utils/utf8.ts index 154884641a4..80c7dada197 100644 --- a/src/utils/utf8.ts +++ b/src/utils/utf8.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { assert } from "./assert"; +import { assert } from './assert'; // Code originally came from goog.crypt.stringToUtf8ByteArray, but for some reason they // automatically replaced '\r\n' with '\n', and they didn't handle surrogate pairs, @@ -27,13 +27,13 @@ import { assert } from "./assert"; // pair). // See http://www.ecma-international.org/ecma-262/5.1/#sec-15.1.3 - /** * @param {string} str * @return {Array} */ export const stringToByteArray = function(str) { - var out = [], p = 0; + var out = [], + p = 0; for (var i = 0; i < str.length; i++) { var c = str.charCodeAt(i); @@ -65,7 +65,6 @@ export const stringToByteArray = function(str) { return out; }; - /** * Calculate length without actually converting; useful for doing cheaper validation. * @param {string} str diff --git a/src/utils/util.ts b/src/utils/util.ts index f9a99d09d2f..b54e770146e 100644 --- a/src/utils/util.ts +++ b/src/utils/util.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { forEach } from "./obj"; +import { forEach } from './obj'; /** * Returns a querystring-formatted string (e.g. &arg=val&arg2=val2) from a params @@ -29,16 +29,17 @@ export const querystring = function(querystringParams) { forEach(querystringParams, function(key, value) { if (Array.isArray(value)) { value.forEach(function(arrayVal) { - params.push(encodeURIComponent(key) + '=' + encodeURIComponent(arrayVal)); + params.push( + encodeURIComponent(key) + '=' + encodeURIComponent(arrayVal) + ); }); } else { params.push(encodeURIComponent(key) + '=' + encodeURIComponent(value)); } }); - return (params.length) ? '&' + params.join('&') : ''; + return params.length ? '&' + params.join('&') : ''; }; - /** * Decodes a querystring (e.g. ?arg=val&arg2=val2) into a params object (e.g. 
{arg: 'val', arg2: 'val2'}) * @@ -56,4 +57,4 @@ export const querystringDecode = function(querystring) { } }); return obj; -}; \ No newline at end of file +}; diff --git a/src/utils/validation.ts b/src/utils/validation.ts index c3251a588ad..21c27eabdb3 100644 --- a/src/utils/validation.ts +++ b/src/utils/validation.ts @@ -28,12 +28,17 @@ export const validateArgCount = function(fnName, minCount, maxCount, argCount) { if (argCount < minCount) { argError = 'at least ' + minCount; } else if (argCount > maxCount) { - argError = (maxCount === 0) ? 'none' : ('no more than ' + maxCount); + argError = maxCount === 0 ? 'none' : 'no more than ' + maxCount; } if (argError) { - var error = fnName + ' failed: Was called with ' + argCount + - ((argCount === 1) ? ' argument.' : ' arguments.') + - ' Expects ' + argError + '.'; + var error = + fnName + + ' failed: Was called with ' + + argCount + + (argCount === 1 ? ' argument.' : ' arguments.') + + ' Expects ' + + argError + + '.'; throw new Error(error); } }; @@ -62,14 +67,16 @@ export function errorPrefix(fnName, argumentNumber, optional) { argName = optional ? 'fourth' : 'Fourth'; break; default: - throw new Error('errorPrefix called with argumentNumber > 4. Need to update it?'); + throw new Error( + 'errorPrefix called with argumentNumber > 4. Need to update it?' + ); } var error = fnName + ' failed: '; error += argName + ' argument '; return error; -}; +} /** * @param {!string} fnName @@ -77,27 +84,46 @@ export function errorPrefix(fnName, argumentNumber, optional) { * @param {!string} namespace * @param {boolean} optional */ -export const validateNamespace = function(fnName, argumentNumber, namespace, optional) { - if (optional && !(namespace)) - return; +export const validateNamespace = function( + fnName, + argumentNumber, + namespace, + optional +) { + if (optional && !namespace) return; if (typeof namespace !== 'string') { //TODO: I should do more validation here. We only allow certain chars in namespaces. 
- throw new Error(errorPrefix(fnName, argumentNumber, optional) + - 'must be a valid firebase namespace.'); + throw new Error( + errorPrefix(fnName, argumentNumber, optional) + + 'must be a valid firebase namespace.' + ); } }; -export const validateCallback = function(fnName, argumentNumber, callback, optional) { - if (optional && !(callback)) - return; +export const validateCallback = function( + fnName, + argumentNumber, + callback, + optional +) { + if (optional && !callback) return; if (typeof callback !== 'function') - throw new Error(errorPrefix(fnName, argumentNumber, optional) + 'must be a valid function.'); + throw new Error( + errorPrefix(fnName, argumentNumber, optional) + + 'must be a valid function.' + ); }; -export const validateContextObject = function(fnName, argumentNumber, context, optional) { - if (optional && !(context)) - return; +export const validateContextObject = function( + fnName, + argumentNumber, + context, + optional +) { + if (optional && !context) return; if (typeof context !== 'object' || context === null) - throw new Error(errorPrefix(fnName, argumentNumber, optional) + - 'must be a valid context object.'); + throw new Error( + errorPrefix(fnName, argumentNumber, optional) + + 'must be a valid context object.' + ); }; diff --git a/tests/app/errors.test.ts b/tests/app/errors.test.ts index 560c6dbe88f..f1b52db8f19 100644 --- a/tests/app/errors.test.ts +++ b/tests/app/errors.test.ts @@ -13,58 +13,57 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {assert} from 'chai'; -import {ErrorFactory, ErrorList, patchCapture} from '../../src/app/errors'; +import { assert } from 'chai'; +import { ErrorFactory, ErrorList, patchCapture } from '../../src/app/errors'; -type Err = - 'generic-error' | - 'file-not-found' | - 'anon-replace' -; +type Err = 'generic-error' | 'file-not-found' | 'anon-replace'; let errors = { - 'generic-error': "Unknown error", + 'generic-error': 'Unknown error', 'file-not-found': "Could not find file: '{$file}'", - 'anon-replace': "Hello, {$repl_}!" + 'anon-replace': 'Hello, {$repl_}!' } as ErrorList; let error = new ErrorFactory('fake', 'Fake', errors); -describe("FirebaseError", () => { - it("create", () => { +describe('FirebaseError', () => { + it('create', () => { let e = error.create('generic-error'); - assert.equal(e.code, "fake/generic-error"); - assert.equal(e.message, "Fake: Unknown error (fake/generic-error)."); + assert.equal(e.code, 'fake/generic-error'); + assert.equal(e.message, 'Fake: Unknown error (fake/generic-error).'); }); - it("String replacement", () => { - let e = error.create('file-not-found', {file: 'foo.txt'}); - assert.equal(e.code, "fake/file-not-found"); - assert.equal(e.message, - "Fake: Could not find file: 'foo.txt' (fake/file-not-found)."); + it('String replacement', () => { + let e = error.create('file-not-found', { file: 'foo.txt' }); + assert.equal(e.code, 'fake/file-not-found'); + assert.equal( + e.message, + "Fake: Could not find file: 'foo.txt' (fake/file-not-found)." + ); assert.equal((e as any).file, 'foo.txt'); }); - it("Anonymous String replacement", () => { - let e = error.create('anon-replace', {repl_: 'world'}); - assert.equal(e.code, "fake/anon-replace"); - assert.equal(e.message, - "Fake: Hello, world! (fake/anon-replace)."); + it('Anonymous String replacement', () => { + let e = error.create('anon-replace', { repl_: 'world' }); + assert.equal(e.code, 'fake/anon-replace'); + assert.equal(e.message, 'Fake: Hello, world! 
(fake/anon-replace).'); assert.isUndefined((e as any).repl_); }); - it("Missing template", () => { + it('Missing template', () => { // Cast to avoid compile-time error. let e = error.create(('no-such-code' as any) as Err); - assert.equal(e.code, "fake/no-such-code"); - assert.equal(e.message, "Fake: Error (fake/no-such-code)."); + assert.equal(e.code, 'fake/no-such-code'); + assert.equal(e.message, 'Fake: Error (fake/no-such-code).'); }); - it("Missing replacement", () => { - let e = error.create('file-not-found', {fileX: 'foo.txt'}); - assert.equal(e.code, "fake/file-not-found"); - assert.equal(e.message, - "Fake: Could not find file: '' (fake/file-not-found)."); + it('Missing replacement', () => { + let e = error.create('file-not-found', { fileX: 'foo.txt' }); + assert.equal(e.code, 'fake/file-not-found'); + assert.equal( + e.message, + "Fake: Could not find file: '' (fake/file-not-found)." + ); }); }); @@ -77,33 +76,33 @@ function stackTests(fakeCapture: any) { let saveCapture: any; describe( - "Error#stack tests - Error.captureStackTrace is " + - ((fakeCapture) ? 'defined' : 'NOT defined'), + 'Error#stack tests - Error.captureStackTrace is ' + + (fakeCapture ? 
'defined' : 'NOT defined'), () => { - before(() => { - saveCapture = patchCapture(fakeCapture) + saveCapture = patchCapture(fakeCapture); }); after(() => { patchCapture(saveCapture); }); - it("has stack", () => { + it('has stack', () => { let e = error.create('generic-error'); // Multi-line match trick - .* does not match \n assert.match(e.stack, /FirebaseError[\s\S]*?errors.test/); }); - it("stack frames", () => { + it('stack frames', () => { try { dummy1(); assert.ok(false); - } catch(e) { + } catch (e) { assert.match(e.stack, /dummy2[\s\S]*?dummy1/); } }); - }); + } + ); } function dummy1() { diff --git a/tests/app/firebase_app.test.ts b/tests/app/firebase_app.test.ts index fd8a7b137c9..d4b72da602a 100644 --- a/tests/app/firebase_app.test.ts +++ b/tests/app/firebase_app.test.ts @@ -19,20 +19,20 @@ import { FirebaseApp, FirebaseService } from '../../src/app/firebase_app'; -import {assert} from 'chai'; +import { assert } from 'chai'; -describe("Firebase App Class", () => { +describe('Firebase App Class', () => { let firebase: FirebaseNamespace; beforeEach(() => { firebase = createFirebaseNamespace(); }); - it("No initial apps.", () => { + it('No initial apps.', () => { assert.equal(firebase.apps.length, 0); }); - it("Can intialize DEFAULT App.", () => { + it('Can intialize DEFAULT App.', () => { let app = firebase.initializeApp({}); assert.equal(firebase.apps.length, 1); assert.strictEqual(app, firebase.apps[0]); @@ -41,25 +41,24 @@ describe("Firebase App Class", () => { assert.strictEqual(firebase.app('[DEFAULT]'), app); }); - it("Can get options of App.", () => { - const options = {'test': 'option'}; + it('Can get options of App.', () => { + const options = { test: 'option' }; let app = firebase.initializeApp(options); - assert.deepEqual((app.options as any), (options as any)); + assert.deepEqual(app.options as any, options as any); }); - it("Can delete App.", () => { + it('Can delete App.', () => { let app = firebase.initializeApp({}); 
assert.equal(firebase.apps.length, 1); - return app.delete() - .then(() => { - assert.equal(firebase.apps.length, 0); - }); + return app.delete().then(() => { + assert.equal(firebase.apps.length, 0); + }); }); - it("Register App Hook", (done) => { + it('Register App Hook', done => { let events = ['create', 'delete']; let hookEvents = 0; - let app: FirebaseApp;; + let app: FirebaseApp; firebase.INTERNAL.registerService( 'test', (app: FirebaseApp) => { @@ -72,91 +71,92 @@ describe("Firebase App Class", () => { if (hookEvents === events.length) { done(); } - }); + } + ); app = firebase.initializeApp({}); // Ensure the hook is called synchronously assert.equal(hookEvents, 1); app.delete(); }); - it("Can create named App.", () => { + it('Can create named App.', () => { let app = firebase.initializeApp({}, 'my-app'); assert.equal(firebase.apps.length, 1); assert.equal(app.name, 'my-app'); assert.strictEqual(firebase.app('my-app'), app); }); - it("Can create named App and DEFAULT app.", () => { + it('Can create named App and DEFAULT app.', () => { firebase.initializeApp({}, 'my-app'); assert.equal(firebase.apps.length, 1); firebase.initializeApp({}); assert.equal(firebase.apps.length, 2); }); - it("Can get app via firebase namespace.", () => { + it('Can get app via firebase namespace.', () => { firebase.initializeApp({}); }); - it("Duplicate DEFAULT initialize is an error.", () => { + it('Duplicate DEFAULT initialize is an error.', () => { firebase.initializeApp({}); assert.throws(() => { firebase.initializeApp({}); }, /\[DEFAULT\].*exists/i); }); - it("Duplicate named App initialize is an error.", () => { + it('Duplicate named App initialize is an error.', () => { firebase.initializeApp({}, 'abc'); assert.throws(() => { firebase.initializeApp({}, 'abc'); }, /'abc'.*exists/i); }); - it("Modifying options object does not change options.", () => { - let options = {opt: 'original', nested: {opt: 123}}; + it('Modifying options object does not change options.', () => { + let 
options = { opt: 'original', nested: { opt: 123 } }; firebase.initializeApp(options); options.opt = 'changed'; options.nested.opt = 456; - assert.deepEqual(firebase.app().options, - {opt: 'original', nested: {opt: 123}}); + assert.deepEqual(firebase.app().options, { + opt: 'original', + nested: { opt: 123 } + }); }); - it("Error to use app after it is deleted.", () => { + it('Error to use app after it is deleted.', () => { let app = firebase.initializeApp({}); - return app.delete() - .then(() => { - assert.throws(() => { - console.log(app.name); - }, /already.*deleted/); - }); + return app.delete().then(() => { + assert.throws(() => { + console.log(app.name); + }, /already.*deleted/); + }); }); - it("OK to create same-name app after it is deleted.", () => { + it('OK to create same-name app after it is deleted.', () => { let app = firebase.initializeApp({}, 'app-name'); - return app.delete() - .then(() => { - let app2 = firebase.initializeApp({}, 'app-name'); - assert.ok(app !== app2, "Expect new instance."); - // But original app id still orphaned. - assert.throws(() => { - console.log(app.name); - }, /already.*deleted/); - }); + return app.delete().then(() => { + let app2 = firebase.initializeApp({}, 'app-name'); + assert.ok(app !== app2, 'Expect new instance.'); + // But original app id still orphaned. 
+ assert.throws(() => { + console.log(app.name); + }, /already.*deleted/); + }); }); - it("OK to use Object.prototype member names as app name.", () => { + it('OK to use Object.prototype member names as app name.', () => { let app = firebase.initializeApp({}, 'toString'); assert.equal(firebase.apps.length, 1); assert.equal(app.name, 'toString'); assert.strictEqual(firebase.app('toString'), app); }); - it("Error to get uninitialized app using Object.prototype member name.", () => { + it('Error to get uninitialized app using Object.prototype member name.', () => { assert.throws(() => { firebase.app('toString'); }, /'toString'.*created/i); }); - it("Only calls createService on first use (per app).", () => { + it('Only calls createService on first use (per app).', () => { let registrations = 0; firebase.INTERNAL.registerService('test', (app: FirebaseApp) => { registrations += 1; @@ -179,7 +179,7 @@ describe("Firebase App Class", () => { assert.equal(registrations, 2); }); - it("Can lazy load a service", () => { + it('Can lazy load a service', () => { let registrations = 0; const app1 = firebase.initializeApp({}); @@ -190,7 +190,7 @@ describe("Firebase App Class", () => { return new TestService(app); }); - assert.isDefined((app1 as any).lazyService); + assert.isDefined((app1 as any).lazyService); // Initial service registration happens on first invocation assert.equal(registrations, 0); @@ -210,7 +210,7 @@ describe("Firebase App Class", () => { // Service should already be defined for the second app const app2 = firebase.initializeApp({}, 'second'); assert.isDefined((app1 as any).lazyService); - + // Service still should not have registered for the second app assert.equal(registrations, 1); @@ -219,7 +219,7 @@ describe("Firebase App Class", () => { assert.equal(registrations, 2); }); - it("Can lazy register App Hook", (done) => { + it('Can lazy register App Hook', done => { let events = ['create', 'delete']; let hookEvents = 0; const app = firebase.initializeApp({}); @@ 
-235,7 +235,8 @@ describe("Firebase App Class", () => { if (hookEvents === events.length) { done(); } - }); + } + ); // Ensure the hook is called synchronously assert.equal(hookEvents, 1); app.delete(); @@ -246,7 +247,7 @@ describe("Firebase App Class", () => { firebase.INTERNAL.registerService( 'multiInstance', (...args) => { - const [app,,instanceIdentifier] = args; + const [app, , instanceIdentifier] = args; return new TestService(app, instanceIdentifier); }, null, @@ -262,12 +263,22 @@ describe("Firebase App Class", () => { // Capture a custom instance service ref const serviceIdentifier = 'custom instance identifier'; const service2 = (firebase.app() as any).multiInstance(serviceIdentifier); - assert.strictEqual(service2, (firebase.app() as any).multiInstance(serviceIdentifier)); + assert.strictEqual( + service2, + (firebase.app() as any).multiInstance(serviceIdentifier) + ); // Ensure that the two services **are not equal** - assert.notStrictEqual(service.instanceIdentifier, service2.instanceIdentifier, '`instanceIdentifier` is not being set correctly'); + assert.notStrictEqual( + service.instanceIdentifier, + service2.instanceIdentifier, + '`instanceIdentifier` is not being set correctly' + ); assert.notStrictEqual(service, service2); - assert.notStrictEqual((firebase.app() as any).multiInstance(), (firebase.app() as any).multiInstance(serviceIdentifier)); + assert.notStrictEqual( + (firebase.app() as any).multiInstance(), + (firebase.app() as any).multiInstance(serviceIdentifier) + ); }); it(`Should return the same instance of a service if a service doesn't support multi instance`, () => { @@ -275,8 +286,8 @@ describe("Firebase App Class", () => { firebase.INTERNAL.registerService( 'singleInstance', (...args) => { - const [app,,instanceIdentifier] = args; - return new TestService(app, instanceIdentifier) + const [app, , instanceIdentifier] = args; + return new TestService(app, instanceIdentifier); }, null, null, @@ -290,14 +301,21 @@ describe("Firebase App 
Class", () => { const service2 = (firebase.app() as any).singleInstance(serviceIdentifier); // Ensure that the two services **are equal** - assert.strictEqual(service.instanceIdentifier, service2.instanceIdentifier, '`instanceIdentifier` is not being set correctly'); + assert.strictEqual( + service.instanceIdentifier, + service2.instanceIdentifier, + '`instanceIdentifier` is not being set correctly' + ); assert.strictEqual(service, service2); }); it(`Should pass null to the factory method if using default instance`, () => { // Register Multi Instance Service firebase.INTERNAL.registerService('testService', (...args) => { - const [app,,instanceIdentifier] = args; - assert.isUndefined(instanceIdentifier, '`instanceIdentifier` is not `undefined`'); + const [app, , instanceIdentifier] = args; + assert.isUndefined( + instanceIdentifier, + '`instanceIdentifier` is not `undefined`' + ); return new TestService(app, instanceIdentifier); }); firebase.initializeApp({}); @@ -315,16 +333,17 @@ describe("Firebase App Class", () => { const service = new TestService(app); (service as any).token = 'tokenFor' + counter++; extendApp({ - 'INTERNAL': { + INTERNAL: { getToken: () => { return Promise.resolve({ - accessToken: (service as any).token, + accessToken: (service as any).token }); - }, - }, - }); + } + } + }); return service; - }); + } + ); // Initialize 2 apps and their corresponding services. const app = firebase.initializeApp({}); (app as any).test(); @@ -332,7 +351,8 @@ describe("Firebase App Class", () => { (app2 as any).test(); // Confirm extended INTERNAL getToken resolve with the corresponding // service's value. 
- return app.INTERNAL.getToken() + return app.INTERNAL + .getToken() .then(token => { assert.equal('tokenFor0', token.accessToken); return app2.INTERNAL.getToken(); @@ -342,13 +362,13 @@ describe("Firebase App Class", () => { }); }); - describe("Check for bad app names", () => { - let tests = ["", 123, false, null]; + describe('Check for bad app names', () => { + let tests = ['', 123, false, null]; for (let data of tests) { it("where name == '" + data + "'", () => { assert.throws(() => { firebase.initializeApp({}, data as string); - }, /Illegal app name/i);; + }, /Illegal app name/i); }); } }); diff --git a/tests/app/subscribe.test.ts b/tests/app/subscribe.test.ts index 148479d2d29..82fa96e83a3 100644 --- a/tests/app/subscribe.test.ts +++ b/tests/app/subscribe.test.ts @@ -17,17 +17,17 @@ import { async, CompleteFn, - createSubscribe, + createSubscribe, ErrorFn, NextFn, Observer, Subscribe, - Unsubscribe, + Unsubscribe } from '../../src/app/subscribe'; -import {assert} from 'chai'; +import { assert } from 'chai'; import * as sinon from 'sinon'; -describe("createSubscribe", function() { +describe('createSubscribe', function() { let spy: any; beforeEach(() => { // Listen to console.error calls. 
@@ -38,7 +38,7 @@ describe("createSubscribe", function() { spy.restore(); }); - it("Creation", (done) => { + it('Creation', done => { let subscribe = createSubscribe((observer: Observer) => { observer.next(123); }); @@ -50,7 +50,7 @@ describe("createSubscribe", function() { }); }); - it("Logging observer error to console", (done) => { + it('Logging observer error to console', done => { let uncatchableError = new Error('uncatchable'); let subscribe = createSubscribe((observer: Observer) => { observer.next(123); @@ -72,8 +72,8 @@ describe("createSubscribe", function() { }); }); - it("Well-defined subscription order", (done) => { - let subscribe = createSubscribe((observer) => { + it('Well-defined subscription order', done => { + let subscribe = createSubscribe(observer => { observer.next(123); // Subscription after value emitted should NOT be received. subscribe({ @@ -90,9 +90,9 @@ describe("createSubscribe", function() { }); }); - it("Subscribing to already complete Subscribe", (done) => { + it('Subscribing to already complete Subscribe', done => { let seq = 0; - let subscribe = createSubscribe((observer) => { + let subscribe = createSubscribe(observer => { observer.next(456); observer.complete(); }); @@ -112,11 +112,11 @@ describe("createSubscribe", function() { }); }); - it("Subscribing to errored Subscribe", (done) => { + it('Subscribing to errored Subscribe', done => { let seq = 0; - let subscribe = createSubscribe((observer) => { + let subscribe = createSubscribe(observer => { observer.next(246); - observer.error(new Error("failure")); + observer.error(new Error('failure')); }); subscribe({ next(value: number) { @@ -128,7 +128,7 @@ describe("createSubscribe", function() { subscribe({ error(e2) { assert.equal(seq++, 2); - assert.equal(e.message, "failure"); + assert.equal(e.message, 'failure'); done(); } }); @@ -139,7 +139,7 @@ describe("createSubscribe", function() { }); }); - it("Delayed value", (done) => { + it('Delayed value', done => { let subscribe = 
createSubscribe((observer: Observer) => { setTimeout(() => observer.next(123), 10); }); @@ -150,21 +150,21 @@ describe("createSubscribe", function() { }); }); - it("Executor throws => Error", () => { + it('Executor throws => Error', () => { // It's an application error to throw an exception in the executor - // but since it is called asynchronously, our only option is // to emit that Error and terminate the Subscribe. let subscribe = createSubscribe((observer: Observer) => { - throw new Error("Executor throws"); + throw new Error('Executor throws'); }); subscribe({ error(e) { - assert.equal(e.message, "Executor throws"); + assert.equal(e.message, 'Executor throws'); } }); }); - it("Sequence", (done) => { + it('Sequence', done => { let subscribe = makeCounter(10); let j = 1; @@ -179,12 +179,14 @@ describe("createSubscribe", function() { }); }); - it("unlisten", (done) => { + it('unlisten', done => { let subscribe = makeCounter(10); - subscribe({complete: () => { - async(done)(); - }}); + subscribe({ + complete: () => { + async(done)(); + } + }); let j = 1; let unsub = subscribe({ @@ -196,12 +198,12 @@ describe("createSubscribe", function() { } }, complete: () => { - assert.ok(false, "Does not call completed if unsubscribed"); + assert.ok(false, 'Does not call completed if unsubscribed'); } }); }); - it("onNoObservers", (done) => { + it('onNoObservers', done => { let subscribe = makeCounter(10); let j = 1; @@ -215,13 +217,13 @@ describe("createSubscribe", function() { } }, complete: () => { - assert.ok(false, "Does not call completed if unsubscribed"); + assert.ok(false, 'Does not call completed if unsubscribed'); } }); }); // TODO(koss): Add test for partial Observer (missing methods). 
- it("Partial Observer", (done) => { + it('Partial Observer', done => { let subscribe = makeCounter(10); let unsub = subscribe({ @@ -230,7 +232,6 @@ describe("createSubscribe", function() { } }); }); - }); function makeCounter(maxCount: number, ms = 10): Subscribe { @@ -253,5 +254,6 @@ function makeCounter(maxCount: number, ms = 10): Subscribe { (observer: Observer) => { clearInterval(id); id = undefined; - }); + } + ); } diff --git a/tests/database/browser/connection.test.ts b/tests/database/browser/connection.test.ts index f83b81bd1da..2a6ef1a5724 100644 --- a/tests/database/browser/connection.test.ts +++ b/tests/database/browser/connection.test.ts @@ -14,22 +14,24 @@ * limitations under the License. */ -import { expect } from "chai"; -import { TEST_PROJECT, testRepoInfo } from "../helpers/util"; -import { Connection } from "../../../src/database/realtime/Connection"; +import { expect } from 'chai'; +import { TEST_PROJECT, testRepoInfo } from '../helpers/util'; +import { Connection } from '../../../src/database/realtime/Connection'; describe('Connection', function() { it('return the session id', function(done) { - new Connection('1', - testRepoInfo(TEST_PROJECT.databaseURL), - message => {}, - (timestamp, sessionId) => { - expect(sessionId).not.to.be.null; - expect(sessionId).not.to.equal(''); - done(); - }, - () => {}, - reason => {}); + new Connection( + '1', + testRepoInfo(TEST_PROJECT.databaseURL), + message => {}, + (timestamp, sessionId) => { + expect(sessionId).not.to.be.null; + expect(sessionId).not.to.equal(''); + done(); + }, + () => {}, + reason => {} + ); }); // TODO - Flakey Test. 
When Dev Tools is closed on my Mac, this test @@ -38,19 +40,25 @@ describe('Connection', function() { // https://app.asana.com/0/58926111402292/101921715724749 it.skip('disconnect old session on new connection', function(done) { const info = testRepoInfo(TEST_PROJECT.databaseURL); - new Connection('1', info, - message => {}, - (timestamp, sessionId) => { - new Connection('2', info, - message => {}, - (timestamp, sessionId) => {}, - () => {}, - reason => {}, - sessionId); - }, - () => { - done(); // first connection was disconnected - }, - reason => {}); + new Connection( + '1', + info, + message => {}, + (timestamp, sessionId) => { + new Connection( + '2', + info, + message => {}, + (timestamp, sessionId) => {}, + () => {}, + reason => {}, + sessionId + ); + }, + () => { + done(); // first connection was disconnected + }, + reason => {} + ); }); }); diff --git a/tests/database/browser/crawler_support.test.ts b/tests/database/browser/crawler_support.test.ts index 68bcc9fce30..20aee8b0007 100644 --- a/tests/database/browser/crawler_support.test.ts +++ b/tests/database/browser/crawler_support.test.ts @@ -14,14 +14,14 @@ * limitations under the License. */ -import { expect } from "chai"; -import { forceRestClient } from "../../../src/database/api/test_access"; +import { expect } from 'chai'; +import { forceRestClient } from '../../../src/database/api/test_access'; -import { +import { getRandomNode, testAuthTokenProvider, getFreshRepoFromReference -} from "../helpers/util"; +} from '../helpers/util'; // Some sanity checks for the ReadonlyRestClient crawler support. describe('Crawler Support', function() { @@ -114,17 +114,24 @@ describe('Crawler Support', function() { // We need to wait long enough to be sure that our 'hello' didn't actually get set, but there's // no good way to do that. So we just do a couple round-trips via the REST client and assume // that's good enough. 
- return restRef.child('obj').once('value').then(function(s) { - expect(s.val()).to.deep.equal(initialData.obj); + return restRef + .child('obj') + .once('value') + .then(function(s) { + expect(s.val()).to.deep.equal(initialData.obj); - return restRef.child('obj').once('value'); - }).then(function(s) { - expect(s.val()).to.deep.equal(initialData.obj); - normalRef.child('leaf').off(); - }, function (reason) { - normalRef.child('leaf').off(); - return Promise.reject(reason); - }); + return restRef.child('obj').once('value'); + }) + .then( + function(s) { + expect(s.val()).to.deep.equal(initialData.obj); + normalRef.child('leaf').off(); + }, + function(reason) { + normalRef.child('leaf').off(); + return Promise.reject(reason); + } + ); }); it('.info/connected fires with true', function(done) { diff --git a/tests/database/compound_write.test.ts b/tests/database/compound_write.test.ts index 8488c4ae8cc..553ddb6ee8b 100644 --- a/tests/database/compound_write.test.ts +++ b/tests/database/compound_write.test.ts @@ -14,28 +14,33 @@ * limitations under the License. 
*/ -import { expect } from "chai"; -import { ChildrenNode } from "../../src/database/core/snap/ChildrenNode"; -import { CompoundWrite } from "../../src/database/core/CompoundWrite"; -import { LeafNode } from "../../src/database/core/snap/LeafNode"; -import { NamedNode } from "../../src/database/core/snap/Node"; -import { nodeFromJSON } from "../../src/database/core/snap/nodeFromJSON"; -import { Path } from "../../src/database/core/util/Path"; +import { expect } from 'chai'; +import { ChildrenNode } from '../../src/database/core/snap/ChildrenNode'; +import { CompoundWrite } from '../../src/database/core/CompoundWrite'; +import { LeafNode } from '../../src/database/core/snap/LeafNode'; +import { NamedNode } from '../../src/database/core/snap/Node'; +import { nodeFromJSON } from '../../src/database/core/snap/nodeFromJSON'; +import { Path } from '../../src/database/core/util/Path'; describe('CompoundWrite Tests', function() { const LEAF_NODE = nodeFromJSON('leaf-node'); const PRIO_NODE = nodeFromJSON('prio'); - const CHILDREN_NODE = nodeFromJSON({ 'child-1': 'value-1', 'child-2': 'value-2' }); + const CHILDREN_NODE = nodeFromJSON({ + 'child-1': 'value-1', + 'child-2': 'value-2' + }); const EMPTY_NODE = ChildrenNode.EMPTY_NODE; function assertNodeGetsCorrectPriority(compoundWrite, node, priority) { if (node.isEmpty()) { expect(compoundWrite.apply(node)).to.equal(EMPTY_NODE); } else { - expect(compoundWrite.apply(node)).to.deep.equal(node.updatePriority(priority)); + expect(compoundWrite.apply(node)).to.deep.equal( + node.updatePriority(priority) + ); } } - + function assertNodesEqual(expected, actual) { expect(actual.equals(expected)).to.be.true; } @@ -45,24 +50,34 @@ describe('CompoundWrite Tests', function() { }); it('CompoundWrite with priority update is not empty.', function() { - expect(CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE).isEmpty()).to.be.false; + expect( + CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE).isEmpty() + 
).to.be.false; }); it('CompoundWrite with update is not empty.', function() { - expect(CompoundWrite.Empty.addWrite(new Path('foo/bar'), LEAF_NODE).isEmpty()).to.be.false; + expect( + CompoundWrite.Empty.addWrite(new Path('foo/bar'), LEAF_NODE).isEmpty() + ).to.be.false; }); it('CompoundWrite with root update is not empty.', function() { - expect(CompoundWrite.Empty.addWrite(Path.Empty, LEAF_NODE).isEmpty()).to.be.false; + expect(CompoundWrite.Empty.addWrite(Path.Empty, LEAF_NODE).isEmpty()).to.be + .false; }); it('CompoundWrite with empty root update is not empty.', function() { - expect(CompoundWrite.Empty.addWrite(Path.Empty, EMPTY_NODE).isEmpty()).to.be.false; + expect(CompoundWrite.Empty.addWrite(Path.Empty, EMPTY_NODE).isEmpty()).to.be + .false; }); it('CompoundWrite with root priority update, child write is not empty.', function() { - let compoundWrite = CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE); - expect(compoundWrite.childCompoundWrite(new Path('.priority')).isEmpty()).to.be.false; + let compoundWrite = CompoundWrite.Empty.addWrite( + new Path('.priority'), + PRIO_NODE + ); + expect(compoundWrite.childCompoundWrite(new Path('.priority')).isEmpty()).to + .be.false; }); it('Applies leaf overwrite', function() { @@ -97,29 +112,41 @@ describe('CompoundWrite Tests', function() { let compoundWrite = CompoundWrite.Empty; const updateOne = nodeFromJSON('new-foo-value'); const updateTwo = nodeFromJSON('baz-value'); - const updateThree = nodeFromJSON({'foo': 'foo-value', 'bar': 'bar-value' }); + const updateThree = nodeFromJSON({ foo: 'foo-value', bar: 'bar-value' }); compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateOne); compoundWrite = compoundWrite.addWrite(new Path('child-1/baz'), updateTwo); compoundWrite = compoundWrite.addWrite(new Path('child-1'), updateThree); const expectedChildOne = { - 'foo': 'foo-value', - 'bar': 'bar-value' + foo: 'foo-value', + bar: 'bar-value' }; - const expected = 
CHILDREN_NODE.updateImmediateChild('child-1', - nodeFromJSON(expectedChildOne)); + const expected = CHILDREN_NODE.updateImmediateChild( + 'child-1', + nodeFromJSON(expectedChildOne) + ); assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); it('child priority updates empty priority on child write', function() { let compoundWrite = CompoundWrite.Empty; - compoundWrite = compoundWrite.addWrite(new Path('child-1/.priority'), EMPTY_NODE); + compoundWrite = compoundWrite.addWrite( + new Path('child-1/.priority'), + EMPTY_NODE + ); const node = new LeafNode('foo', PRIO_NODE); - assertNodeGetsCorrectPriority(compoundWrite.childCompoundWrite(new Path('child-1')), node, EMPTY_NODE); + assertNodeGetsCorrectPriority( + compoundWrite.childCompoundWrite(new Path('child-1')), + node, + EMPTY_NODE + ); }); it('deep priority set works on empty node when other set is available', function() { let compoundWrite = CompoundWrite.Empty; - compoundWrite = compoundWrite.addWrite(new Path('foo/.priority'), PRIO_NODE); + compoundWrite = compoundWrite.addWrite( + new Path('foo/.priority'), + PRIO_NODE + ); compoundWrite = compoundWrite.addWrite(new Path('foo/child'), LEAF_NODE); const node = compoundWrite.apply(EMPTY_NODE); assertNodesEqual(PRIO_NODE, node.getChild(new Path('foo')).getPriority()); @@ -127,24 +154,33 @@ describe('CompoundWrite Tests', function() { it('child merge looks into update node', function() { let compoundWrite = CompoundWrite.Empty; - const update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value'}); + const update = nodeFromJSON({ foo: 'foo-value', bar: 'bar-value' }); compoundWrite = compoundWrite.addWrite(Path.Empty, update); - assertNodesEqual(nodeFromJSON('foo-value'), - compoundWrite.childCompoundWrite(new Path('foo')).apply(EMPTY_NODE)); + assertNodesEqual( + nodeFromJSON('foo-value'), + compoundWrite.childCompoundWrite(new Path('foo')).apply(EMPTY_NODE) + ); }); it('child merge removes node on deeper paths', function() { let compoundWrite = 
CompoundWrite.Empty; - const update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + const update = nodeFromJSON({ foo: 'foo-value', bar: 'bar-value' }); compoundWrite = compoundWrite.addWrite(Path.Empty, update); - assertNodesEqual(EMPTY_NODE, compoundWrite.childCompoundWrite(new Path('foo/not/existing')).apply(LEAF_NODE)); + assertNodesEqual( + EMPTY_NODE, + compoundWrite + .childCompoundWrite(new Path('foo/not/existing')) + .apply(LEAF_NODE) + ); }); it('child merge with empty path is same merge', function() { let compoundWrite = CompoundWrite.Empty; - const update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + const update = nodeFromJSON({ foo: 'foo-value', bar: 'bar-value' }); compoundWrite = compoundWrite.addWrite(Path.Empty, update); - expect(compoundWrite.childCompoundWrite(Path.Empty)).to.equal(compoundWrite); + expect(compoundWrite.childCompoundWrite(Path.Empty)).to.equal( + compoundWrite + ); }); it('root update removes root priority', function() { @@ -156,25 +192,34 @@ describe('CompoundWrite Tests', function() { it('deep update removes priority there', function() { let compoundWrite = CompoundWrite.Empty; - compoundWrite = compoundWrite.addWrite(new Path('foo/.priority'), PRIO_NODE); - compoundWrite = compoundWrite.addWrite(new Path('foo'), nodeFromJSON('bar')); - const expected = nodeFromJSON({ 'foo': 'bar' }); + compoundWrite = compoundWrite.addWrite( + new Path('foo/.priority'), + PRIO_NODE + ); + compoundWrite = compoundWrite.addWrite( + new Path('foo'), + nodeFromJSON('bar') + ); + const expected = nodeFromJSON({ foo: 'bar' }); assertNodesEqual(expected, compoundWrite.apply(EMPTY_NODE)); }); it('adding updates at path works', function() { let compoundWrite = CompoundWrite.Empty; const updates = { - 'foo': nodeFromJSON('foo-value'), - 'bar': nodeFromJSON('bar-value') + foo: nodeFromJSON('foo-value'), + bar: nodeFromJSON('bar-value') }; compoundWrite = compoundWrite.addWrites(new Path('child-1'), updates); const 
expectedChildOne = { - 'foo': 'foo-value', - 'bar': 'bar-value' + foo: 'foo-value', + bar: 'bar-value' }; - const expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); + const expected = CHILDREN_NODE.updateImmediateChild( + 'child-1', + nodeFromJSON(expectedChildOne) + ); assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); @@ -188,45 +233,64 @@ describe('CompoundWrite Tests', function() { compoundWrite = compoundWrite.addWrites(Path.Empty, updates); const expected = { - 'child-1': 'new-value-1', - 'child-3': 'value-3' + 'child-1': 'new-value-1', + 'child-3': 'value-3' }; - assertNodesEqual(nodeFromJSON(expected), compoundWrite.apply(CHILDREN_NODE)); + assertNodesEqual( + nodeFromJSON(expected), + compoundWrite.apply(CHILDREN_NODE) + ); }); it('child write of root priority works', function() { - let compoundWrite = CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE); - assertNodesEqual(PRIO_NODE, compoundWrite.childCompoundWrite(new Path('.priority')).apply(EMPTY_NODE)); + let compoundWrite = CompoundWrite.Empty.addWrite( + new Path('.priority'), + PRIO_NODE + ); + assertNodesEqual( + PRIO_NODE, + compoundWrite.childCompoundWrite(new Path('.priority')).apply(EMPTY_NODE) + ); }); it('complete children only returns complete overwrites', function() { let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('child-1'), LEAF_NODE); - expect(compoundWrite.getCompleteChildren()).to.deep.equal([new NamedNode('child-1', LEAF_NODE)]); + expect(compoundWrite.getCompleteChildren()).to.deep.equal([ + new NamedNode('child-1', LEAF_NODE) + ]); }); it('complete children only returns empty overwrites', function() { let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('child-1'), EMPTY_NODE); - expect(compoundWrite.getCompleteChildren()).to.deep.equal([new NamedNode('child-1', EMPTY_NODE)]); + expect(compoundWrite.getCompleteChildren()).to.deep.equal([ + 
new NamedNode('child-1', EMPTY_NODE) + ]); }); it('complete children doesnt return deep overwrites', function() { let compoundWrite = CompoundWrite.Empty; - compoundWrite = compoundWrite.addWrite(new Path('child-1/deep/path'), LEAF_NODE); + compoundWrite = compoundWrite.addWrite( + new Path('child-1/deep/path'), + LEAF_NODE + ); expect(compoundWrite.getCompleteChildren()).to.deep.equal([]); }); it('complete children return all complete children but no incomplete', function() { let compoundWrite = CompoundWrite.Empty; - compoundWrite = compoundWrite.addWrite(new Path('child-1/deep/path'), LEAF_NODE); + compoundWrite = compoundWrite.addWrite( + new Path('child-1/deep/path'), + LEAF_NODE + ); compoundWrite = compoundWrite.addWrite(new Path('child-2'), LEAF_NODE); compoundWrite = compoundWrite.addWrite(new Path('child-3'), EMPTY_NODE); const expected = { 'child-2': LEAF_NODE, 'child-3': EMPTY_NODE }; - const actual = { }; + const actual = {}; const completeChildren = compoundWrite.getCompleteChildren(); for (let i = 0; i < completeChildren.length; i++) { actual[completeChildren[i].name] = completeChildren[i].node; @@ -243,7 +307,7 @@ describe('CompoundWrite Tests', function() { 'child-2': nodeFromJSON('value-2') }; - const actual = { }; + const actual = {}; const completeChildren = compoundWrite.getCompleteChildren(); for (let i = 0; i < completeChildren.length; i++) { actual[completeChildren[i].name] = completeChildren[i].node; @@ -268,21 +332,27 @@ describe('CompoundWrite Tests', function() { }); it('compound write with deep update has shadowing write', function() { - let compoundWrite = CompoundWrite.Empty.addWrite(new Path('deep/update'), LEAF_NODE); + let compoundWrite = CompoundWrite.Empty.addWrite( + new Path('deep/update'), + LEAF_NODE + ); expect(compoundWrite.hasCompleteWrite(Path.Empty)).to.be.false; expect(compoundWrite.hasCompleteWrite(new Path('deep'))).to.be.false; expect(compoundWrite.hasCompleteWrite(new Path('deep/update'))).to.be.true; }); 
it('compound write with priority update has shadowing write', function() { - let compoundWrite = CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE); + let compoundWrite = CompoundWrite.Empty.addWrite( + new Path('.priority'), + PRIO_NODE + ); expect(compoundWrite.hasCompleteWrite(Path.Empty)).to.be.false; expect(compoundWrite.hasCompleteWrite(new Path('.priority'))).to.be.true; }); it('updates can be removed', function() { let compoundWrite = CompoundWrite.Empty; - const update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + const update = nodeFromJSON({ foo: 'foo-value', bar: 'bar-value' }); compoundWrite = compoundWrite.addWrite(new Path('child-1'), update); compoundWrite = compoundWrite.removeWrite(new Path('child-1')); assertNodesEqual(CHILDREN_NODE, compoundWrite.apply(CHILDREN_NODE)); @@ -290,37 +360,49 @@ describe('CompoundWrite Tests', function() { it('deep removes has no effect on overlaying set', function() { let compoundWrite = CompoundWrite.Empty; - const updateOne = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + const updateOne = nodeFromJSON({ foo: 'foo-value', bar: 'bar-value' }); const updateTwo = nodeFromJSON('baz-value'); const updateThree = nodeFromJSON('new-foo-value'); compoundWrite = compoundWrite.addWrite(new Path('child-1'), updateOne); compoundWrite = compoundWrite.addWrite(new Path('child-1/baz'), updateTwo); - compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateThree); + compoundWrite = compoundWrite.addWrite( + new Path('child-1/foo'), + updateThree + ); compoundWrite = compoundWrite.removeWrite(new Path('child-1/foo')); const expectedChildOne = { - 'foo': 'new-foo-value', - 'bar': 'bar-value', - 'baz': 'baz-value' + foo: 'new-foo-value', + bar: 'bar-value', + baz: 'baz-value' }; - const expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); + const expected = CHILDREN_NODE.updateImmediateChild( + 'child-1', + nodeFromJSON(expectedChildOne) + ); 
assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); it('remove at path without set is without effect', function() { let compoundWrite = CompoundWrite.Empty; - const updateOne = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + const updateOne = nodeFromJSON({ foo: 'foo-value', bar: 'bar-value' }); const updateTwo = nodeFromJSON('baz-value'); const updateThree = nodeFromJSON('new-foo-value'); compoundWrite = compoundWrite.addWrite(new Path('child-1'), updateOne); compoundWrite = compoundWrite.addWrite(new Path('child-1/baz'), updateTwo); - compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateThree); + compoundWrite = compoundWrite.addWrite( + new Path('child-1/foo'), + updateThree + ); compoundWrite = compoundWrite.removeWrite(new Path('child-2')); const expectedChildOne = { - 'foo': 'new-foo-value', - 'bar': 'bar-value', - 'baz': 'baz-value' + foo: 'new-foo-value', + bar: 'bar-value', + baz: 'baz-value' }; - const expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); + const expected = CHILDREN_NODE.updateImmediateChild( + 'child-1', + nodeFromJSON(expectedChildOne) + ); assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); @@ -334,19 +416,22 @@ describe('CompoundWrite Tests', function() { it('removing only affects removed path', function() { let compoundWrite = CompoundWrite.Empty; const updates = { - 'child-1': nodeFromJSON('new-value-1'), - 'child-2': EMPTY_NODE, - 'child-3': nodeFromJSON('value-3') + 'child-1': nodeFromJSON('new-value-1'), + 'child-2': EMPTY_NODE, + 'child-3': nodeFromJSON('value-3') }; compoundWrite = compoundWrite.addWrites(Path.Empty, updates); compoundWrite = compoundWrite.removeWrite(new Path('child-2')); const expected = { - 'child-1': 'new-value-1', - 'child-2': 'value-2', - 'child-3': 'value-3' + 'child-1': 'new-value-1', + 'child-2': 'value-2', + 'child-3': 'value-3' }; - assertNodesEqual(nodeFromJSON(expected), 
compoundWrite.apply(CHILDREN_NODE)); + assertNodesEqual( + nodeFromJSON(expected), + compoundWrite.apply(CHILDREN_NODE) + ); }); it('remove removes all deeper sets', function() { @@ -354,14 +439,20 @@ describe('CompoundWrite Tests', function() { const updateTwo = nodeFromJSON('baz-value'); const updateThree = nodeFromJSON('new-foo-value'); compoundWrite = compoundWrite.addWrite(new Path('child-1/baz'), updateTwo); - compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateThree); + compoundWrite = compoundWrite.addWrite( + new Path('child-1/foo'), + updateThree + ); compoundWrite = compoundWrite.removeWrite(new Path('child-1')); assertNodesEqual(CHILDREN_NODE, compoundWrite.apply(CHILDREN_NODE)); }); it('remove at root also removes priority', function() { let compoundWrite = CompoundWrite.Empty; - compoundWrite = compoundWrite.addWrite(Path.Empty, new LeafNode('foo', PRIO_NODE)); + compoundWrite = compoundWrite.addWrite( + Path.Empty, + new LeafNode('foo', PRIO_NODE) + ); compoundWrite = compoundWrite.removeWrite(Path.Empty); const node = nodeFromJSON('value'); assertNodeGetsCorrectPriority(compoundWrite, node, EMPTY_NODE); @@ -370,7 +461,10 @@ describe('CompoundWrite Tests', function() { it('updating priority doesnt overwrite leaf node', function() { let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(Path.Empty, LEAF_NODE); - compoundWrite = compoundWrite.addWrite(new Path('child/.priority'), PRIO_NODE); + compoundWrite = compoundWrite.addWrite( + new Path('child/.priority'), + PRIO_NODE + ); assertNodesEqual(LEAF_NODE, compoundWrite.apply(EMPTY_NODE)); }); @@ -385,14 +479,18 @@ describe('CompoundWrite Tests', function() { let compoundWrite = CompoundWrite.Empty; const path = new Path('child-1'); compoundWrite = compoundWrite.addWrite(path, LEAF_NODE); - expect(compoundWrite.apply(CHILDREN_NODE)).to.deep.equal(CHILDREN_NODE.updateImmediateChild(path.getFront(), LEAF_NODE)); + 
expect(compoundWrite.apply(CHILDREN_NODE)).to.deep.equal( + CHILDREN_NODE.updateImmediateChild(path.getFront(), LEAF_NODE) + ); }); it('Updates existing child', function() { let compoundWrite = CompoundWrite.Empty; const path = new Path('child-1/foo'); compoundWrite = compoundWrite.addWrite(path, LEAF_NODE); - expect(compoundWrite.apply(CHILDREN_NODE)).to.deep.equal(CHILDREN_NODE.updateChild(path, LEAF_NODE)); + expect(compoundWrite.apply(CHILDREN_NODE)).to.deep.equal( + CHILDREN_NODE.updateChild(path, LEAF_NODE) + ); }); it("Doesn't update priority on empty node.", function() { @@ -412,7 +510,10 @@ describe('CompoundWrite Tests', function() { let compoundWrite = CompoundWrite.Empty; const path = new Path('child-1/.priority'); compoundWrite = compoundWrite.addWrite(path, PRIO_NODE); - assertNodesEqual(CHILDREN_NODE.updateChild(path, PRIO_NODE), compoundWrite.apply(CHILDREN_NODE)); + assertNodesEqual( + CHILDREN_NODE.updateChild(path, PRIO_NODE), + compoundWrite.apply(CHILDREN_NODE) + ); }); it("Doesn't update priority of nonexistent child.", function() { @@ -424,31 +525,51 @@ describe('CompoundWrite Tests', function() { it('Deep update existing updates', function() { let compoundWrite = CompoundWrite.Empty; - const updateOne = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + const updateOne = nodeFromJSON({ foo: 'foo-value', bar: 'bar-value' }); const updateTwo = nodeFromJSON('baz-value'); const updateThree = nodeFromJSON('new-foo-value'); compoundWrite = compoundWrite.addWrite(new Path('child-1'), updateOne); compoundWrite = compoundWrite.addWrite(new Path('child-1/baz'), updateTwo); - compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateThree); + compoundWrite = compoundWrite.addWrite( + new Path('child-1/foo'), + updateThree + ); const expectedChildOne = { - 'foo': 'new-foo-value', - 'bar': 'bar-value', - 'baz': 'baz-value' + foo: 'new-foo-value', + bar: 'bar-value', + baz: 'baz-value' }; - const expected = 
CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); + const expected = CHILDREN_NODE.updateImmediateChild( + 'child-1', + nodeFromJSON(expectedChildOne) + ); assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); it("child priority doesn't update empty node priority on child merge", function() { let compoundWrite = CompoundWrite.Empty; - compoundWrite = compoundWrite.addWrite(new Path('child-1/.priority'), PRIO_NODE); - assertNodeGetsCorrectPriority(compoundWrite.childCompoundWrite(new Path('child-1')), EMPTY_NODE, EMPTY_NODE); + compoundWrite = compoundWrite.addWrite( + new Path('child-1/.priority'), + PRIO_NODE + ); + assertNodeGetsCorrectPriority( + compoundWrite.childCompoundWrite(new Path('child-1')), + EMPTY_NODE, + EMPTY_NODE + ); }); it('Child priority updates priority on child write', function() { let compoundWrite = CompoundWrite.Empty; - compoundWrite = compoundWrite.addWrite(new Path('child-1/.priority'), PRIO_NODE); + compoundWrite = compoundWrite.addWrite( + new Path('child-1/.priority'), + PRIO_NODE + ); const node = nodeFromJSON('value'); - assertNodeGetsCorrectPriority(compoundWrite.childCompoundWrite(new Path('child-1')), node, PRIO_NODE); + assertNodeGetsCorrectPriority( + compoundWrite.childCompoundWrite(new Path('child-1')), + node, + PRIO_NODE + ); }); -}); \ No newline at end of file +}); diff --git a/tests/database/database.test.ts b/tests/database/database.test.ts index 5f42373f266..94f2e4e06ba 100644 --- a/tests/database/database.test.ts +++ b/tests/database/database.test.ts @@ -14,19 +14,18 @@ * limitations under the License. 
*/ -import { expect } from "chai"; -import firebase from "../../src/app"; -import { - TEST_PROJECT, - patchFakeAuthFunctions, -} from "./helpers/util"; -import "../../src/database"; +import { expect } from 'chai'; +import firebase from '../../src/app'; +import { TEST_PROJECT, patchFakeAuthFunctions } from './helpers/util'; +import '../../src/database'; describe('Database Tests', function() { let defaultApp; beforeEach(function() { - defaultApp = firebase.initializeApp({databaseURL: TEST_PROJECT.databaseURL}); + defaultApp = firebase.initializeApp({ + databaseURL: TEST_PROJECT.databaseURL + }); patchFakeAuthFunctions(defaultApp); }); @@ -95,7 +94,9 @@ describe('Database Tests', function() { it('refFromURL() validates domain', function() { const db = firebase.database(); expect(function() { - const ref = db.refFromURL('https://thisisnotarealfirebase.firebaseio.com/path/to/data'); + const ref = db.refFromURL( + 'https://thisisnotarealfirebase.firebaseio.com/path/to/data' + ); }).to.throw(/does not match.*database/i); }); diff --git a/tests/database/datasnapshot.test.ts b/tests/database/datasnapshot.test.ts index 13165c82c78..eeb3f24185e 100644 --- a/tests/database/datasnapshot.test.ts +++ b/tests/database/datasnapshot.test.ts @@ -14,36 +14,36 @@ * limitations under the License. 
*/ -import { expect } from "chai"; -import { nodeFromJSON } from "../../src/database/core/snap/nodeFromJSON"; -import { PRIORITY_INDEX } from "../../src/database/core/snap/indexes/PriorityIndex"; -import { getRandomNode } from "./helpers/util"; -import { DataSnapshot } from "../../src/database/api/DataSnapshot"; -import { Reference } from "../../src/database/api/Reference"; - -describe("DataSnapshot Tests", function () { +import { expect } from 'chai'; +import { nodeFromJSON } from '../../src/database/core/snap/nodeFromJSON'; +import { PRIORITY_INDEX } from '../../src/database/core/snap/indexes/PriorityIndex'; +import { getRandomNode } from './helpers/util'; +import { DataSnapshot } from '../../src/database/api/DataSnapshot'; +import { Reference } from '../../src/database/api/Reference'; + +describe('DataSnapshot Tests', function() { /** @return {!DataSnapshot} */ const snapshotForJSON = function(json) { const dummyRef = getRandomNode(); return new DataSnapshot(nodeFromJSON(json), dummyRef, PRIORITY_INDEX); }; - it("DataSnapshot.hasChildren() works.", function() { + it('DataSnapshot.hasChildren() works.', function() { let snap = snapshotForJSON({}); expect(snap.hasChildren()).to.equal(false); snap = snapshotForJSON(5); expect(snap.hasChildren()).to.equal(false); - snap = snapshotForJSON({'x': 5}); + snap = snapshotForJSON({ x: 5 }); expect(snap.hasChildren()).to.equal(true); }); - it("DataSnapshot.exists() works.", function() { + it('DataSnapshot.exists() works.', function() { let snap = snapshotForJSON({}); expect(snap.exists()).to.equal(false); - snap = snapshotForJSON({ '.priority':1 }); + snap = snapshotForJSON({ '.priority': 1 }); expect(snap.exists()).to.equal(false); snap = snapshotForJSON(null); @@ -55,19 +55,18 @@ describe("DataSnapshot Tests", function () { snap = snapshotForJSON(5); expect(snap.exists()).to.equal(true); - snap = snapshotForJSON({'x': 5}); + snap = snapshotForJSON({ x: 5 }); expect(snap.exists()).to.equal(true); }); - 
it("DataSnapshot.val() works.", function() { + it('DataSnapshot.val() works.', function() { let snap = snapshotForJSON(5); expect(snap.val()).to.equal(5); - snap = snapshotForJSON({ }); + snap = snapshotForJSON({}); expect(snap.val()).to.equal(null); - const json = - { + const json = { x: 5, y: { ya: 1, @@ -79,19 +78,19 @@ describe("DataSnapshot Tests", function () { expect(snap.val()).to.deep.equal(json); }); - it("DataSnapshot.child() works.", function() { - const snap = snapshotForJSON({x: 5, y: { yy: 3, yz: 4}}); + it('DataSnapshot.child() works.', function() { + const snap = snapshotForJSON({ x: 5, y: { yy: 3, yz: 4 } }); expect(snap.child('x').val()).to.equal(5); - expect(snap.child('y').val()).to.deep.equal({yy: 3, yz: 4}); + expect(snap.child('y').val()).to.deep.equal({ yy: 3, yz: 4 }); expect(snap.child('y').child('yy').val()).to.equal(3); expect(snap.child('y/yz').val()).to.equal(4); expect(snap.child('z').val()).to.equal(null); expect(snap.child('x/y').val()).to.equal(null); - expect(snap.child('x').child('y').val()).to.equal(null) + expect(snap.child('x').child('y').val()).to.equal(null); }); - it("DataSnapshot.hasChild() works.", function() { - const snap = snapshotForJSON({x: 5, y: { yy: 3, yz: 4}}); + it('DataSnapshot.hasChild() works.', function() { + const snap = snapshotForJSON({ x: 5, y: { yy: 3, yz: 4 } }); expect(snap.hasChild('x')).to.equal(true); expect(snap.hasChild('y/yy')).to.equal(true); expect(snap.hasChild('dinosaur')).to.equal(false); @@ -99,8 +98,8 @@ describe("DataSnapshot Tests", function () { expect(snap.hasChild('x/anything/at/all')).to.equal(false); }); - it("DataSnapshot.key works.", function() { - const snap = snapshotForJSON({a: { b: { c: 5 }}}); + it('DataSnapshot.key works.', function() { + const snap = snapshotForJSON({ a: { b: { c: 5 } } }); expect(snap.child('a').key).to.equal('a'); expect(snap.child('a/b/c').key).to.equal('c'); expect(snap.child('/a/b/c/').key).to.equal('c'); @@ -111,8 +110,16 @@ describe("DataSnapshot 
Tests", function () { expect(snap.child('/z/q/r/v/m').key).to.equal('m'); }); - it("DataSnapshot.forEach() works: no priorities.", function() { - const snap = snapshotForJSON({a: 1, z: 26, m: 13, n: 14, c: 3, b: 2, e: 5}); + it('DataSnapshot.forEach() works: no priorities.', function() { + const snap = snapshotForJSON({ + a: 1, + z: 26, + m: 13, + n: 14, + c: 3, + b: 2, + e: 5 + }); let out = ''; snap.forEach(function(child) { out = out + child.key + ':' + child.val() + ':'; @@ -121,15 +128,16 @@ describe("DataSnapshot Tests", function () { expect(out).to.equal('a:1:b:2:c:3:e:5:m:13:n:14:z:26:'); }); - it("DataSnapshot.forEach() works: numeric priorities.", function() { + it('DataSnapshot.forEach() works: numeric priorities.', function() { const snap = snapshotForJSON({ - a: {'.value': 1, '.priority': 26}, - z: {'.value': 26, '.priority': 1}, - m: {'.value': 13, '.priority': 14}, - n: {'.value': 14, '.priority': 12}, - c: {'.value': 3, '.priority': 24}, - b: {'.value': 2, '.priority': 25}, - e: {'.value': 5, '.priority': 22}}); + a: { '.value': 1, '.priority': 26 }, + z: { '.value': 26, '.priority': 1 }, + m: { '.value': 13, '.priority': 14 }, + n: { '.value': 14, '.priority': 12 }, + c: { '.value': 3, '.priority': 24 }, + b: { '.value': 2, '.priority': 25 }, + e: { '.value': 5, '.priority': 22 } + }); let out = ''; snap.forEach(function(child) { @@ -139,15 +147,16 @@ describe("DataSnapshot Tests", function () { expect(out).to.equal('z:26:n:14:m:13:e:5:c:3:b:2:a:1:'); }); - it("DataSnapshot.forEach() works: numeric priorities as strings.", function() { + it('DataSnapshot.forEach() works: numeric priorities as strings.', function() { const snap = snapshotForJSON({ - a: {'.value': 1, '.priority': '26'}, - z: {'.value': 26, '.priority': '1'}, - m: {'.value': 13, '.priority': '14'}, - n: {'.value': 14, '.priority': '12'}, - c: {'.value': 3, '.priority': '24'}, - b: {'.value': 2, '.priority': '25'}, - e: {'.value': 5, '.priority': '22'}}); + a: { '.value': 1, 
'.priority': '26' }, + z: { '.value': 26, '.priority': '1' }, + m: { '.value': 13, '.priority': '14' }, + n: { '.value': 14, '.priority': '12' }, + c: { '.value': 3, '.priority': '24' }, + b: { '.value': 2, '.priority': '25' }, + e: { '.value': 5, '.priority': '22' } + }); let out = ''; snap.forEach(function(child) { @@ -157,15 +166,16 @@ describe("DataSnapshot Tests", function () { expect(out).to.equal('z:26:n:14:m:13:e:5:c:3:b:2:a:1:'); }); - it("DataSnapshot.forEach() works: alpha priorities.", function() { + it('DataSnapshot.forEach() works: alpha priorities.', function() { const snap = snapshotForJSON({ - a: {'.value': 1, '.priority': 'first'}, - z: {'.value': 26, '.priority': 'second'}, - m: {'.value': 13, '.priority': 'third'}, - n: {'.value': 14, '.priority': 'fourth'}, - c: {'.value': 3, '.priority': 'fifth'}, - b: {'.value': 2, '.priority': 'sixth'}, - e: {'.value': 5, '.priority': 'seventh'}}); + a: { '.value': 1, '.priority': 'first' }, + z: { '.value': 26, '.priority': 'second' }, + m: { '.value': 13, '.priority': 'third' }, + n: { '.value': 14, '.priority': 'fourth' }, + c: { '.value': 3, '.priority': 'fifth' }, + b: { '.value': 2, '.priority': 'sixth' }, + e: { '.value': 5, '.priority': 'seventh' } + }); let out = ''; snap.forEach(function(child) { @@ -175,27 +185,28 @@ describe("DataSnapshot Tests", function () { expect(out).to.equal('c:3:a:1:n:14:z:26:e:5:b:2:m:13:'); }); - it("DataSnapshot.foreach() works: mixed alpha and numeric priorities", function() { + it('DataSnapshot.foreach() works: mixed alpha and numeric priorities', function() { const json = { - "alpha42": {'.value': 1, '.priority': "zed" }, - "noPriorityC": {'.value': 1, '.priority': null }, - "num41": {'.value': 1, '.priority': 500 }, - "noPriorityB": {'.value': 1, '.priority': null }, - "num80": {'.value': 1, '.priority': 4000.1 }, - "num50": {'.value': 1, '.priority': 4000 }, - "num10": {'.value': 1, '.priority': 24 }, - "alpha41": {'.value': 1, '.priority': "zed" }, - "alpha20": 
{'.value': 1, '.priority': "horse" }, - "num20": {'.value': 1, '.priority': 123 }, - "num70": {'.value': 1, '.priority': 4000.01 }, - "noPriorityA": {'.value': 1, '.priority': null }, - "alpha30": {'.value': 1, '.priority': "tree" }, - "num30": {'.value': 1, '.priority': 300 }, - "num60": {'.value': 1, '.priority': 4000.001 }, - "alpha10": {'.value': 1, '.priority': "0horse" }, - "num42": {'.value': 1, '.priority': 500 }, - "alpha40": {'.value': 1, '.priority': "zed" }, - "num40": {'.value': 1, '.priority': 500 } }; + alpha42: { '.value': 1, '.priority': 'zed' }, + noPriorityC: { '.value': 1, '.priority': null }, + num41: { '.value': 1, '.priority': 500 }, + noPriorityB: { '.value': 1, '.priority': null }, + num80: { '.value': 1, '.priority': 4000.1 }, + num50: { '.value': 1, '.priority': 4000 }, + num10: { '.value': 1, '.priority': 24 }, + alpha41: { '.value': 1, '.priority': 'zed' }, + alpha20: { '.value': 1, '.priority': 'horse' }, + num20: { '.value': 1, '.priority': 123 }, + num70: { '.value': 1, '.priority': 4000.01 }, + noPriorityA: { '.value': 1, '.priority': null }, + alpha30: { '.value': 1, '.priority': 'tree' }, + num30: { '.value': 1, '.priority': 300 }, + num60: { '.value': 1, '.priority': 4000.001 }, + alpha10: { '.value': 1, '.priority': '0horse' }, + num42: { '.value': 1, '.priority': 500 }, + alpha40: { '.value': 1, '.priority': 'zed' }, + num40: { '.value': 1, '.priority': 500 } + }; const snap = snapshotForJSON(json); let out = ''; @@ -203,10 +214,12 @@ describe("DataSnapshot Tests", function () { out = out + child.key + ', '; }); - expect(out).to.equal("noPriorityA, noPriorityB, noPriorityC, num10, num20, num30, num40, num41, num42, num50, num60, num70, num80, alpha10, alpha20, alpha30, alpha40, alpha41, alpha42, "); + expect(out).to.equal( + 'noPriorityA, noPriorityB, noPriorityC, num10, num20, num30, num40, num41, num42, num50, num60, num70, num80, alpha10, alpha20, alpha30, alpha40, alpha41, alpha42, ' + ); }); - it(".val() exports array-like 
data as arrays.", function() { + it('.val() exports array-like data as arrays.', function() { const array = ['bob', 'and', 'becky', 'seem', 'really', 'nice', 'yeah?']; const snap = snapshotForJSON(array); const snapVal = snap.val(); @@ -214,10 +227,10 @@ describe("DataSnapshot Tests", function () { expect(snapVal instanceof Array).to.equal(true); // to.equal doesn't verify type. }); - it("DataSnapshot can be JSON serialized", function() { + it('DataSnapshot can be JSON serialized', function() { const json = { - "foo": "bar", - ".priority": 1 + foo: 'bar', + '.priority': 1 }; const snap = snapshotForJSON(json); expect(JSON.parse(JSON.stringify(snap))).to.deep.equal(json); diff --git a/tests/database/helpers/EventAccumulator.ts b/tests/database/helpers/EventAccumulator.ts index cabb035cb0b..1b0f4a7b314 100644 --- a/tests/database/helpers/EventAccumulator.ts +++ b/tests/database/helpers/EventAccumulator.ts @@ -19,8 +19,12 @@ export const EventAccumulatorFactory = { let count = 0; const condition = () => ea.eventData.length >= count; const ea = new EventAccumulator(condition); - ea.onReset(() => { count = 0; }); - ea.onEvent(() => { count++; }); + ea.onReset(() => { + count = 0; + }); + ea.onEvent(() => { + count++; + }); return ea; } }; @@ -39,10 +43,7 @@ export class EventAccumulator { }); } addEvent(eventData?: any) { - this.eventData = [ - ...this.eventData, - eventData - ]; + this.eventData = [...this.eventData, eventData]; if (typeof this.onEventFxn === 'function') this.onEventFxn(); if (this._testCondition()) { this.resolve(this.eventData); @@ -66,4 +67,4 @@ export class EventAccumulator { _testCondition() { return this.condition(); } -} \ No newline at end of file +} diff --git a/tests/database/helpers/events.ts b/tests/database/helpers/events.ts index fa4f9d07909..7229e0af438 100644 --- a/tests/database/helpers/events.ts +++ b/tests/database/helpers/events.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { TEST_PROJECT } from "./util"; +import { TEST_PROJECT } from './util'; import { Reference } from '../../../src/database/api/Reference'; /** @@ -23,7 +23,6 @@ import { Reference } from '../../../src/database/api/Reference'; */ export let eventCleanupHandlers = []; - /** Clean up outstanding event handlers */ export function eventCleanup() { for (let i = 0; i < eventCleanupHandlers.length; ++i) { @@ -91,7 +90,7 @@ export function eventTestHelper(pathAndEvents, helperName?) { if (waiter()) { resolve(); } - } catch(e) {} + } catch (e) {} }; }; @@ -101,7 +100,15 @@ export function eventTestHelper(pathAndEvents, helperName?) { // keep waiting. const waiter = function() { const pathAndEventToString = function(pathAndEvent) { - return '{path: ' + pathAndEvent[0] + ', event:[' + pathAndEvent[1][0] + ', ' + pathAndEvent[1][1] + ']}'; + return ( + '{path: ' + + pathAndEvent[0] + + ', event:[' + + pathAndEvent[1][0] + + ', ' + + pathAndEvent[1][1] + + ']}' + ); }; let i = 0; @@ -109,15 +116,27 @@ export function eventTestHelper(pathAndEvents, helperName?) { const expected = expectedPathAndEvents[i]; const actual = actualPathAndEvents[i]; - if (expected[0] != actual[0] || expected[1][0] != actual[1][0] || expected[1][1] != actual[1][1]) { - throw helperName + 'Event ' + i + ' incorrect. Expected: ' + pathAndEventToString(expected) + - ' Actual: ' + pathAndEventToString(actual); + if ( + expected[0] != actual[0] || + expected[1][0] != actual[1][0] || + expected[1][1] != actual[1][1] + ) { + throw helperName + + 'Event ' + + i + + ' incorrect. 
Expected: ' + + pathAndEventToString(expected) + + ' Actual: ' + + pathAndEventToString(actual); } i++; } if (expectedPathAndEvents.length < actualPathAndEvents.length) { - throw helperName + "Extra event detected '" + pathAndEventToString(actualPathAndEvents[i]) + "'."; + throw helperName + + "Extra event detected '" + + pathAndEventToString(actualPathAndEvents[i]) + + "'."; } // If we haven't thrown and both arrays are the same length, then we're @@ -142,14 +161,12 @@ export function eventTestHelper(pathAndEvents, helperName?) { path.off('child_moved', movedCB); path.off('child_changed', changedCB); path.off('value', valueCB); - } + }; }; - const addExpectedEvents = function(pathAndEvents) { const pathsToListenOn = []; for (let i = 0; i < pathAndEvents.length; i++) { - const pathAndEvent = pathAndEvents[i]; const path = pathAndEvent[0]; @@ -159,8 +176,7 @@ export function eventTestHelper(pathAndEvents, helperName?) { pathAndEvent[0] = rawPath(path); - if (pathAndEvent[1][0] === 'value') - pathAndEvent[1][1] = path.key; + if (pathAndEvent[1][0] === 'value') pathAndEvent[1][1] = path.key; expectedPathAndEvents.push(pathAndEvent); } @@ -177,11 +193,13 @@ export function eventTestHelper(pathAndEvents, helperName?) { // Notice the 3rd and 4th events are swapped. // To mitigate this, we re-ordeer your event registrations and do them in order of shortest path to longest. 
- pathsToListenOn.sort(function(a, b) { return a.toString().length - b.toString().length; }); + pathsToListenOn.sort(function(a, b) { + return a.toString().length - b.toString().length; + }); for (let i = 0; i < pathsToListenOn.length; i++) { let path = pathsToListenOn[i]; if (!pathEventListeners[path.toString()]) { - pathEventListeners[path.toString()] = { }; + pathEventListeners[path.toString()] = {}; pathEventListeners[path.toString()].initialized = false; pathEventListeners[path.toString()].unlisten = listenOnPath(path); } @@ -197,12 +215,14 @@ export function eventTestHelper(pathAndEvents, helperName?) { const watchesInitializedWaiter = function() { for (let path in pathEventListeners) { - if (!pathEventListeners[path].initialized) - return false; + if (!pathEventListeners[path].initialized) return false; } // Remove any initialization events. - actualPathAndEvents.splice(actualPathAndEvents.length - initializationEvents, initializationEvents); + actualPathAndEvents.splice( + actualPathAndEvents.length - initializationEvents, + initializationEvents + ); initializationEvents = 0; resolveInit(); @@ -229,4 +249,4 @@ export function eventTestHelper(pathAndEvents, helperName?) { addExpectedEvents(moreEvents); } }; -} \ No newline at end of file +} diff --git a/tests/database/helpers/util.ts b/tests/database/helpers/util.ts index fa2f2c08e18..5875b8070ba 100644 --- a/tests/database/helpers/util.ts +++ b/tests/database/helpers/util.ts @@ -14,13 +14,12 @@ * limitations under the License. 
*/ -import { globalScope } from "../../../src/utils/globalScope"; -import firebase from "../../../src/app"; +import { globalScope } from '../../../src/utils/globalScope'; +import firebase from '../../../src/app'; import '../../../src/database'; -import { Reference } from "../../../src/database/api/Reference"; -import { Query } from "../../../src/database/api/Query"; -import { ConnectionTarget } from "../../../src/database/api/test_access"; - +import { Reference } from '../../../src/database/api/Reference'; +import { Query } from '../../../src/database/api/Query'; +import { ConnectionTarget } from '../../../src/database/api/test_access'; export const TEST_PROJECT = require('../../config/project.json'); @@ -29,7 +28,7 @@ if ('location' in this) { const search = (this.location.search.substr(1) || '').split('&'); for (let i = 0; i < search.length; ++i) { const parts = search[i].split('='); - qs[parts[0]] = parts[1] || true; // support for foo= + qs[parts[0]] = parts[1] || true; // support for foo= } } @@ -49,11 +48,9 @@ export function patchFakeAuthFunctions(app) { return Promise.resolve(token_); }; - app['INTERNAL']['addAuthTokenListener'] = function(listener) { - }; + app['INTERNAL']['addAuthTokenListener'] = function(listener) {}; - app['INTERNAL']['removeAuthTokenListener'] = function(listener) { - }; + app['INTERNAL']['removeAuthTokenListener'] = function(listener) {}; return app; } @@ -72,9 +69,12 @@ export function getRootNode(i = 0, ref?: string) { let app; let db; try { - app = firebase.app("TEST-" + i); - } catch(e) { - app = firebase.initializeApp({ databaseURL: TEST_PROJECT.databaseURL }, "TEST-" + i); + app = firebase.app('TEST-' + i); + } catch (e) { + app = firebase.initializeApp( + { databaseURL: TEST_PROJECT.databaseURL }, + 'TEST-' + i + ); patchFakeAuthFunctions(app); } db = app.database(); @@ -121,7 +121,7 @@ export function getPath(query: Query) { } export function shuffle(arr, randFn = Math.random) { - for (let i = arr.length - 1;i > 0;i--) { + 
for (let i = arr.length - 1; i > 0; i--) { const j = Math.floor(randFn() * (i + 1)); const tmp = arr[i]; arr[i] = arr[j]; @@ -133,7 +133,7 @@ export function testAuthTokenProvider(app) { let token_ = null; let nextToken_ = null; let hasNextToken_ = false; - const listeners_ = []; + const listeners_ = []; app['INTERNAL'] = app['INTERNAL'] || {}; @@ -142,7 +142,7 @@ export function testAuthTokenProvider(app) { token_ = nextToken_; hasNextToken_ = false; } - return Promise.resolve({accessToken: token_}); + return Promise.resolve({ accessToken: token_ }); }; app['INTERNAL']['addAuthTokenListener'] = function(listener) { @@ -150,7 +150,7 @@ export function testAuthTokenProvider(app) { listeners_.push(listener); const async = Promise.resolve(); async.then(function() { - listener(token) + listener(token); }); }; @@ -163,11 +163,13 @@ export function testAuthTokenProvider(app) { token_ = token; const async = Promise.resolve(); for (let i = 0; i < listeners_.length; i++) { - async.then((function(idx) { - return function() { - listeners_[idx](token); - } - }(i))); + async.then( + (function(idx) { + return function() { + listeners_[idx](token); + }; + })(i) + ); } // Any future thens are guaranteed to be resolved after the listeners have been notified @@ -184,7 +186,10 @@ let freshRepoId = 1; const activeFreshApps = []; export function getFreshRepo(url, path?) { - const app = firebase.initializeApp({databaseURL: url}, 'ISOLATED_REPO_' + freshRepoId++); + const app = firebase.initializeApp( + { databaseURL: url }, + 'ISOLATED_REPO_' + freshRepoId++ + ); patchFakeAuthFunctions(app); activeFreshApps.push(app); return app.database().ref(path); @@ -199,7 +204,9 @@ export function getFreshRepoFromReference(ref) { // Little helpers to get the currently cached snapshot / value. 
export function getSnap(path) { let snap; - const callback = function(snapshot) { snap = snapshot; }; + const callback = function(snapshot) { + snap = snapshot; + }; path.once('value', callback); return snap; } @@ -229,6 +236,6 @@ export function testRepoInfo(url) { const regex = /https?:\/\/(.*).firebaseio.com/; const match = url.match(regex); if (!match) throw new Error('Couldnt get Namespace from passed URL'); - const [,ns] = match; + const [, ns] = match; return new ConnectionTarget(`${ns}.firebaseio.com`, true, ns, false); } diff --git a/tests/database/info.test.ts b/tests/database/info.test.ts index cd807d02970..5b857134cca 100644 --- a/tests/database/info.test.ts +++ b/tests/database/info.test.ts @@ -14,15 +14,15 @@ * limitations under the License. */ -import { expect } from "chai"; -import { +import { expect } from 'chai'; +import { getFreshRepo, getRootNode, getRandomNode, getPath -} from "./helpers/util"; -import { Reference } from "../../src/database/api/Reference"; -import { EventAccumulator } from "./helpers/EventAccumulator"; +} from './helpers/util'; +import { Reference } from '../../src/database/api/Reference'; +import { EventAccumulator } from './helpers/EventAccumulator'; /** * We have a test that depends on leveraging two properly @@ -35,38 +35,53 @@ declare const waitsFor; declare const TEST_ALT_NAMESPACE; declare const TEST_NAMESPACE; -describe(".info Tests", function () { +describe('.info Tests', function() { this.timeout(3000); - it("Can get a reference to .info nodes.", function() { - const f = (getRootNode() as Reference); + it('Can get a reference to .info nodes.', function() { + const f = getRootNode() as Reference; expect(getPath(f.child('.info'))).to.equal('/.info'); expect(getPath(f.child('.info/foo'))).to.equal('/.info/foo'); }); it("Can't write to .info", function() { const f = (getRootNode() as Reference).child('.info'); - expect(function() {f.set('hi');}).to.throw; - expect(function() {f.setWithPriority('hi', 5);}).to.throw; - 
expect(function() {f.setPriority('hi');}).to.throw; - expect(function() {f.transaction(function() { });}).to.throw; - expect(function() {f.push();}).to.throw; - expect(function() {f.remove();}).to.throw; - - expect(function() {f.child('test').set('hi');}).to.throw; + expect(function() { + f.set('hi'); + }).to.throw; + expect(function() { + f.setWithPriority('hi', 5); + }).to.throw; + expect(function() { + f.setPriority('hi'); + }).to.throw; + expect(function() { + f.transaction(function() {}); + }).to.throw; + expect(function() { + f.push(); + }).to.throw; + expect(function() { + f.remove(); + }).to.throw; + + expect(function() { + f.child('test').set('hi'); + }).to.throw; const f2 = f.child('foo/baz'); - expect(function() {f2.set('hi');}).to.throw; + expect(function() { + f2.set('hi'); + }).to.throw; }); - it("Can watch .info/connected.", function() { + it('Can watch .info/connected.', function() { return new Promise(resolve => { const f = (getRandomNode() as Reference).root; f.child('.info/connected').on('value', function(snap) { if (snap.val() === true) resolve(); }); - }) + }); }); - it('.info/connected correctly goes to false when disconnected.', async function() { const f = (getRandomNode() as Reference).root; let everConnected = false; @@ -74,11 +89,9 @@ describe(".info Tests", function () { const ea = new EventAccumulator(() => everConnected); f.child('.info/connected').on('value', function(snap) { - if (snap.val() === true) - everConnected = true; + if (snap.val() === true) everConnected = true; - if (everConnected) - connectHistory += snap.val() + ','; + if (everConnected) connectHistory += snap.val() + ','; ea.addEvent(); }); @@ -93,8 +106,8 @@ describe(".info Tests", function () { // Skipping this test as it is expecting a server time diff from a // local Firebase - it.skip(".info/serverTimeOffset", async function() { - const ref = (getRootNode() as Reference); + it.skip('.info/serverTimeOffset', async function() { + const ref = getRootNode() as 
Reference; // make sure push works const child = ref.push(); @@ -118,77 +131,86 @@ describe(".info Tests", function () { ref.child('.info/serverTimeOffset').off(); }); - it.skip("database.goOffline() / database.goOnline() connection management", function() { - const ref = getFreshRepo(TEST_NAMESPACE); - const refAlt = getFreshRepo(TEST_ALT_NAMESPACE); - let ready; - - // Wait until we're connected to both Firebases - runs(function() { - ready = 0; - const eventHandler = function(snap) { - if (snap.val() === true) { - snap.ref.off(); - ready += 1; - } - }; - ref.child(".info/connected").on("value", eventHandler); - refAlt.child(".info/connected").on("value", eventHandler); - }); - waitsFor(function() { return (ready == 2); }); + it.skip( + 'database.goOffline() / database.goOnline() connection management', + function() { + const ref = getFreshRepo(TEST_NAMESPACE); + const refAlt = getFreshRepo(TEST_ALT_NAMESPACE); + let ready; + + // Wait until we're connected to both Firebases + runs(function() { + ready = 0; + const eventHandler = function(snap) { + if (snap.val() === true) { + snap.ref.off(); + ready += 1; + } + }; + ref.child('.info/connected').on('value', eventHandler); + refAlt.child('.info/connected').on('value', eventHandler); + }); + waitsFor(function() { + return ready == 2; + }); - runs(function() { - ref.database.goOffline(); - refAlt.database.goOffline(); - }); + runs(function() { + ref.database.goOffline(); + refAlt.database.goOffline(); + }); - // Ensure we're disconnected from both Firebases - runs(function() { - ready = 0; - const eventHandler = function(snap) { - expect(snap.val() === false); - ready += 1; - }; - ref.child(".info/connected").once("value", eventHandler); - refAlt.child(".info/connected").once("value", eventHandler); - }); - waitsFor(function() { return (ready == 2); }); - - // Ensure that we don't automatically reconnect upon Reference creation - runs(function() { - ready = 0; - const refDup = ref.database.ref(); - 
refDup.child(".info/connected").on("value", function(snap) { - ready = (snap.val() === true) || ready; + // Ensure we're disconnected from both Firebases + runs(function() { + ready = 0; + const eventHandler = function(snap) { + expect(snap.val() === false); + ready += 1; + }; + ref.child('.info/connected').once('value', eventHandler); + refAlt.child('.info/connected').once('value', eventHandler); + }); + waitsFor(function() { + return ready == 2; }); - setTimeout(function() { - expect(ready).to.equal(0); - refDup.child(".info/connected").off(); - ready = -1; - }, 500); - }); - waitsFor(function() { return ready == -1; }); - runs(function() { - ref.database.goOnline(); - refAlt.database.goOnline(); - }); + // Ensure that we don't automatically reconnect upon Reference creation + runs(function() { + ready = 0; + const refDup = ref.database.ref(); + refDup.child('.info/connected').on('value', function(snap) { + ready = snap.val() === true || ready; + }); + setTimeout(function() { + expect(ready).to.equal(0); + refDup.child('.info/connected').off(); + ready = -1; + }, 500); + }); + waitsFor(function() { + return ready == -1; + }); - // Ensure we're connected to both Firebases - runs(function() { - ready = 0; - const eventHandler = function(snap) { - if (snap.val() === true) { - snap.ref.off(); - ready += 1; - } - }; - ref.child(".info/connected").on("value", eventHandler); - refAlt.child(".info/connected").on("value", eventHandler); - }); + runs(function() { + ref.database.goOnline(); + refAlt.database.goOnline(); + }); - waitsFor(function() { - return (ready == 2); - }); - }); + // Ensure we're connected to both Firebases + runs(function() { + ready = 0; + const eventHandler = function(snap) { + if (snap.val() === true) { + snap.ref.off(); + ready += 1; + } + }; + ref.child('.info/connected').on('value', eventHandler); + refAlt.child('.info/connected').on('value', eventHandler); + }); + + waitsFor(function() { + return ready == 2; + }); + } + ); }); diff --git 
a/tests/database/node.test.ts b/tests/database/node.test.ts index 480993d3edd..8612c2835bf 100644 --- a/tests/database/node.test.ts +++ b/tests/database/node.test.ts @@ -14,15 +14,15 @@ * limitations under the License. */ -import { expect } from "chai"; -import { PRIORITY_INDEX } from "../../src/database/core/snap/indexes/PriorityIndex"; -import { LeafNode } from "../../src/database/core/snap/LeafNode"; -import { IndexMap } from "../../src/database/core/snap/IndexMap"; -import { Path } from "../../src/database/core/util/Path"; -import { SortedMap } from "../../src/database/core/util/SortedMap"; -import { ChildrenNode } from "../../src/database/core/snap/ChildrenNode"; -import { NAME_COMPARATOR } from "../../src/database/core/snap/comparators"; -import { nodeFromJSON } from "../../src/database/core/snap/nodeFromJSON"; +import { expect } from 'chai'; +import { PRIORITY_INDEX } from '../../src/database/core/snap/indexes/PriorityIndex'; +import { LeafNode } from '../../src/database/core/snap/LeafNode'; +import { IndexMap } from '../../src/database/core/snap/IndexMap'; +import { Path } from '../../src/database/core/util/Path'; +import { SortedMap } from '../../src/database/core/util/SortedMap'; +import { ChildrenNode } from '../../src/database/core/snap/ChildrenNode'; +import { NAME_COMPARATOR } from '../../src/database/core/snap/comparators'; +import { nodeFromJSON } from '../../src/database/core/snap/nodeFromJSON'; import { Node } from '../../src/database/core/snap/Node'; describe('Node Tests', function() { @@ -40,40 +40,45 @@ describe('Node Tests', function() { expect(x.getValue()).to.equal(true); }); - it("LeafNode.updatePriority returns a new leaf node without changing the old.", function() { - const x = new LeafNode("test", new LeafNode(42)); + it('LeafNode.updatePriority returns a new leaf node without changing the old.', function() { + const x = new LeafNode('test', new LeafNode(42)); const y = x.updatePriority(new LeafNode(187)); // old node is the same. 
- expect(x.getValue()).to.equal("test"); + expect(x.getValue()).to.equal('test'); expect(x.getPriority().val()).to.equal(42); // new node has the new priority but the old value. - expect((y as any).getValue()).to.equal("test"); + expect((y as any).getValue()).to.equal('test'); expect(y.getPriority().val()).to.equal(187); }); - it("LeafNode.updateImmediateChild returns a new children node.", function() { - const x = new LeafNode("test", new LeafNode(42)); - const y = x.updateImmediateChild('test', new LeafNode("foo")); + it('LeafNode.updateImmediateChild returns a new children node.', function() { + const x = new LeafNode('test', new LeafNode(42)); + const y = x.updateImmediateChild('test', new LeafNode('foo')); expect(y.isLeafNode()).to.equal(false); expect(y.getPriority().val()).to.equal(42); - expect((y.getImmediateChild('test') as LeafNode).getValue()).to.equal('foo'); + expect((y.getImmediateChild('test') as LeafNode).getValue()).to.equal( + 'foo' + ); }); - it("LeafNode.getImmediateChild returns an empty node.", function() { - const x = new LeafNode("test"); + it('LeafNode.getImmediateChild returns an empty node.', function() { + const x = new LeafNode('test'); expect(x.getImmediateChild('foo')).to.equal(ChildrenNode.EMPTY_NODE); }); - it("LeafNode.getChild returns an empty node.", function() { + it('LeafNode.getChild returns an empty node.', function() { const x = new LeafNode('test'); expect(x.getChild(new Path('foo/bar'))).to.equal(ChildrenNode.EMPTY_NODE); }); it('ChildrenNode.updatePriority returns a new internal node without changing the old.', function() { - const x = ChildrenNode.EMPTY_NODE.updateImmediateChild("child", new LeafNode(5)); + const x = ChildrenNode.EMPTY_NODE.updateImmediateChild( + 'child', + new LeafNode(5) + ); const children = (x as any).children_; const y = x.updatePriority(new LeafNode(17)); expect((y as any).children_).to.equal((x as any).children_); @@ -82,179 +87,261 @@ describe('Node Tests', function() { 
expect(y.getPriority().val()).to.equal(17); }); - it('ChildrenNode.updateImmediateChild returns a new internal node with the new child, without changing the old.', - function() { + it('ChildrenNode.updateImmediateChild returns a new internal node with the new child, without changing the old.', function() { const children = new SortedMap(NAME_COMPARATOR); - const x = new ChildrenNode(children, ChildrenNode.EMPTY_NODE, IndexMap.Default); + const x = new ChildrenNode( + children, + ChildrenNode.EMPTY_NODE, + IndexMap.Default + ); const newValue = new LeafNode('new value'); const y = x.updateImmediateChild('test', newValue); expect((x as any).children_).to.equal(children); expect((y as any).children_.get('test')).to.equal(newValue); }); - it("ChildrenNode.updateChild returns a new internal node with the new child, without changing the old.", function() { + it('ChildrenNode.updateChild returns a new internal node with the new child, without changing the old.', function() { const children = new SortedMap(NAME_COMPARATOR); - const x = new ChildrenNode(children, ChildrenNode.EMPTY_NODE, IndexMap.Default); - const newValue = new LeafNode("new value"); + const x = new ChildrenNode( + children, + ChildrenNode.EMPTY_NODE, + IndexMap.Default + ); + const newValue = new LeafNode('new value'); const y = x.updateChild(new Path('test/foo'), newValue); expect((x as any).children_).to.equal(children); expect(y.getChild(new Path('test/foo'))).to.equal(newValue); }); - it("Node.hash() works correctly.", function() { + it('Node.hash() works correctly.', function() { const node = nodeFromJSON({ - intNode:4, - doubleNode:4.5623, - stringNode:"hey guys", - boolNode:true + intNode: 4, + doubleNode: 4.5623, + stringNode: 'hey guys', + boolNode: true }); // !!!NOTE!!! These hashes must match what the server generates. If you change anything so these hashes change, // make sure you change the corresponding server code. 
- expect(node.getImmediateChild("intNode").hash()).to.equal("eVih19a6ZDz3NL32uVBtg9KSgQY="); - expect(node.getImmediateChild("doubleNode").hash()).to.equal("vf1CL0tIRwXXunHcG/irRECk3lY="); - expect(node.getImmediateChild("stringNode").hash()).to.equal("CUNLXWpCVoJE6z7z1vE57lGaKAU="); - expect(node.getImmediateChild("boolNode").hash()).to.equal("E5z61QM0lN/U2WsOnusszCTkR8M="); - - expect(node.hash()).to.equal("6Mc4jFmNdrLVIlJJjz2/MakTK9I="); + expect(node.getImmediateChild('intNode').hash()).to.equal( + 'eVih19a6ZDz3NL32uVBtg9KSgQY=' + ); + expect(node.getImmediateChild('doubleNode').hash()).to.equal( + 'vf1CL0tIRwXXunHcG/irRECk3lY=' + ); + expect(node.getImmediateChild('stringNode').hash()).to.equal( + 'CUNLXWpCVoJE6z7z1vE57lGaKAU=' + ); + expect(node.getImmediateChild('boolNode').hash()).to.equal( + 'E5z61QM0lN/U2WsOnusszCTkR8M=' + ); + + expect(node.hash()).to.equal('6Mc4jFmNdrLVIlJJjz2/MakTK9I='); }); - it("Node.hash() works correctly with priorities.", function() { + it('Node.hash() works correctly with priorities.', function() { const node = nodeFromJSON({ - root: {c: {'.value': 99, '.priority': 'abc'}, '.priority': 'def'} + root: { c: { '.value': 99, '.priority': 'abc' }, '.priority': 'def' } }); - expect(node.hash()).to.equal("Fm6tzN4CVEu5WxFDZUdTtqbTVaA="); + expect(node.hash()).to.equal('Fm6tzN4CVEu5WxFDZUdTtqbTVaA='); }); - it("Node.hash() works correctly with number priorities.", function() { + it('Node.hash() works correctly with number priorities.', function() { const node = nodeFromJSON({ - root: {c: {'.value': 99, '.priority': 42}, '.priority': 3.14} + root: { c: { '.value': 99, '.priority': 42 }, '.priority': 3.14 } }); - expect(node.hash()).to.equal("B15QCqrzCxrI5zz1y00arWqFRFg="); + expect(node.hash()).to.equal('B15QCqrzCxrI5zz1y00arWqFRFg='); }); - it("Node.hash() stress...", function() { + it('Node.hash() stress...', function() { const node = nodeFromJSON({ - a:-1.7976931348623157e+308, - b:1.7976931348623157e+308, - c:"unicode ✔ 🐵 🌴 x͢", - 
d:3.14159265358979323846264338327950, + a: -1.7976931348623157e308, + b: 1.7976931348623157e308, + c: 'unicode ✔ 🐵 🌴 x͢', + d: 3.1415926535897932384626433832795, e: { '.value': 12345678901234568, - '.priority': "🐵" + '.priority': '🐵' }, - "✔": "foo", - '.priority':"✔" + '✔': 'foo', + '.priority': '✔' }); - expect(node.getImmediateChild('a').hash()).to.equal('7HxgOBDEC92uQwhCuuvKA2rbXDA='); - expect(node.getImmediateChild('b').hash()).to.equal('8R+ekVQmxs6ZWP0fdzFHxVeGnWo='); - expect(node.getImmediateChild('c').hash()).to.equal('JoKoFUnbmg3/DlY70KaDWslfYPk='); - expect(node.getImmediateChild('d').hash()).to.equal('Y41iC5+92GIqXfabOm33EanRI8s='); - expect(node.getImmediateChild('e').hash()).to.equal('+E+Mxlqh5MhT+On05bjsZ6JaaxI='); - expect(node.getImmediateChild('✔').hash()).to.equal('MRRL/+aA/uibaL//jghUpxXS/uY='); + expect(node.getImmediateChild('a').hash()).to.equal( + '7HxgOBDEC92uQwhCuuvKA2rbXDA=' + ); + expect(node.getImmediateChild('b').hash()).to.equal( + '8R+ekVQmxs6ZWP0fdzFHxVeGnWo=' + ); + expect(node.getImmediateChild('c').hash()).to.equal( + 'JoKoFUnbmg3/DlY70KaDWslfYPk=' + ); + expect(node.getImmediateChild('d').hash()).to.equal( + 'Y41iC5+92GIqXfabOm33EanRI8s=' + ); + expect(node.getImmediateChild('e').hash()).to.equal( + '+E+Mxlqh5MhT+On05bjsZ6JaaxI=' + ); + expect(node.getImmediateChild('✔').hash()).to.equal( + 'MRRL/+aA/uibaL//jghUpxXS/uY=' + ); expect(node.hash()).to.equal('CyC0OU8GSkOAKnsPjheWtWC0Yxo='); }); - it("ChildrenNode.getPredecessorChild works correctly.", function() { + it('ChildrenNode.getPredecessorChild works correctly.', function() { const node = nodeFromJSON({ - d: true, a: true, g: true, c: true, e: true + d: true, + a: true, + g: true, + c: true, + e: true }); // HACK: Pass null instead of the actual childNode, since it's not actually needed. 
- expect(node.getPredecessorChildName('a', null, DEFAULT_INDEX)).to.equal(null); - expect(node.getPredecessorChildName('c', null, DEFAULT_INDEX)).to.equal('a'); - expect(node.getPredecessorChildName('d', null, DEFAULT_INDEX)).to.equal('c'); - expect(node.getPredecessorChildName('e', null, DEFAULT_INDEX)).to.equal('d'); - expect(node.getPredecessorChildName('g', null, DEFAULT_INDEX)).to.equal('e'); + expect(node.getPredecessorChildName('a', null, DEFAULT_INDEX)).to.equal( + null + ); + expect(node.getPredecessorChildName('c', null, DEFAULT_INDEX)).to.equal( + 'a' + ); + expect(node.getPredecessorChildName('d', null, DEFAULT_INDEX)).to.equal( + 'c' + ); + expect(node.getPredecessorChildName('e', null, DEFAULT_INDEX)).to.equal( + 'd' + ); + expect(node.getPredecessorChildName('g', null, DEFAULT_INDEX)).to.equal( + 'e' + ); }); - it("SortedChildrenNode.getPredecessorChild works correctly.", function() { + it('SortedChildrenNode.getPredecessorChild works correctly.', function() { const node = nodeFromJSON({ - d: { '.value': true, '.priority' : 22 }, - a: { '.value': true, '.priority' : 25 }, - g: { '.value': true, '.priority' : 19 }, - c: { '.value': true, '.priority' : 23 }, - e: { '.value': true, '.priority' : 21 } + d: { '.value': true, '.priority': 22 }, + a: { '.value': true, '.priority': 25 }, + g: { '.value': true, '.priority': 19 }, + c: { '.value': true, '.priority': 23 }, + e: { '.value': true, '.priority': 21 } }); - expect(node.getPredecessorChildName('a', node.getImmediateChild('a'), DEFAULT_INDEX)).to.equal('c'); - expect(node.getPredecessorChildName('c', node.getImmediateChild('c'), DEFAULT_INDEX)).to.equal('d'); - expect(node.getPredecessorChildName('d', node.getImmediateChild('d'), DEFAULT_INDEX)).to.equal('e'); - expect(node.getPredecessorChildName('e', node.getImmediateChild('e'), DEFAULT_INDEX)).to.equal('g'); - expect(node.getPredecessorChildName('g', node.getImmediateChild('g'), DEFAULT_INDEX)).to.equal(null); + expect( + 
node.getPredecessorChildName( + 'a', + node.getImmediateChild('a'), + DEFAULT_INDEX + ) + ).to.equal('c'); + expect( + node.getPredecessorChildName( + 'c', + node.getImmediateChild('c'), + DEFAULT_INDEX + ) + ).to.equal('d'); + expect( + node.getPredecessorChildName( + 'd', + node.getImmediateChild('d'), + DEFAULT_INDEX + ) + ).to.equal('e'); + expect( + node.getPredecessorChildName( + 'e', + node.getImmediateChild('e'), + DEFAULT_INDEX + ) + ).to.equal('g'); + expect( + node.getPredecessorChildName( + 'g', + node.getImmediateChild('g'), + DEFAULT_INDEX + ) + ).to.equal(null); }); - it("SortedChildrenNode.updateImmediateChild works correctly.", function() { + it('SortedChildrenNode.updateImmediateChild works correctly.', function() { let node = nodeFromJSON({ - d: { '.value': true, '.priority' : 22 }, - a: { '.value': true, '.priority' : 25 }, - g: { '.value': true, '.priority' : 19 }, - c: { '.value': true, '.priority' : 23 }, - e: { '.value': true, '.priority' : 21 }, - '.priority' : 1000 + d: { '.value': true, '.priority': 22 }, + a: { '.value': true, '.priority': 25 }, + g: { '.value': true, '.priority': 19 }, + c: { '.value': true, '.priority': 23 }, + e: { '.value': true, '.priority': 21 }, + '.priority': 1000 }); node = node.updateImmediateChild('c', nodeFromJSON(false)); - expect((node.getImmediateChild('c') as LeafNode).getValue()).to.equal(false); + expect((node.getImmediateChild('c') as LeafNode).getValue()).to.equal( + false + ); expect(node.getImmediateChild('c').getPriority().val()).to.equal(null); expect(node.getPriority().val()).to.equal(1000); }); - it("removing nodes correctly removes intermediate nodes with no remaining children", function() { - const json = {a: {b: {c: 1}}}; + it('removing nodes correctly removes intermediate nodes with no remaining children', function() { + const json = { a: { b: { c: 1 } } }; const node = nodeFromJSON(json); - const newNode = node.updateChild(new Path('a/b/c'), ChildrenNode.EMPTY_NODE); + const newNode = 
node.updateChild( + new Path('a/b/c'), + ChildrenNode.EMPTY_NODE + ); expect(newNode.isEmpty()).to.equal(true); }); - it("removing nodes leaves intermediate nodes with other children", function() { - const json = {a: {b: {c: 1}, d: 2}}; + it('removing nodes leaves intermediate nodes with other children', function() { + const json = { a: { b: { c: 1 }, d: 2 } }; const node = nodeFromJSON(json); - const newNode = node.updateChild(new Path('a/b/c'), ChildrenNode.EMPTY_NODE); + const newNode = node.updateChild( + new Path('a/b/c'), + ChildrenNode.EMPTY_NODE + ); expect(newNode.isEmpty()).to.equal(false); expect(newNode.getChild(new Path('a/b/c')).isEmpty()).to.equal(true); expect(newNode.getChild(new Path('a/d')).val()).to.equal(2); }); - it("removing nodes leaves other leaf nodes", function() { - const json = {a: {b: {c: 1, d: 2}}}; + it('removing nodes leaves other leaf nodes', function() { + const json = { a: { b: { c: 1, d: 2 } } }; const node = nodeFromJSON(json); - const newNode = node.updateChild(new Path('a/b/c'), ChildrenNode.EMPTY_NODE); + const newNode = node.updateChild( + new Path('a/b/c'), + ChildrenNode.EMPTY_NODE + ); expect(newNode.isEmpty()).to.equal(false); expect(newNode.getChild(new Path('a/b/c')).isEmpty()).to.equal(true); expect(newNode.getChild(new Path('a/b/d')).val()).to.equal(2); }); - it("removing nodes correctly removes the root", function() { + it('removing nodes correctly removes the root', function() { let json = null; let node = nodeFromJSON(json); let newNode = node.updateChild(new Path(''), ChildrenNode.EMPTY_NODE); expect(newNode.isEmpty()).to.equal(true); - json = {a: 1}; + json = { a: 1 }; node = nodeFromJSON(json); newNode = node.updateChild(new Path('a'), ChildrenNode.EMPTY_NODE); expect(newNode.isEmpty()).to.equal(true); }); - it("ignores null values", function() { - const json = {a: 1, b: null}; + it('ignores null values', function() { + const json = { a: 1, b: null }; const node = nodeFromJSON(json); expect((node as 
any).children_.get('b')).to.equal(null); }); - it("Leading zeroes in path are handled properly", function() { - const json = {"1": 1, "01": 2, "001": 3}; + it('Leading zeroes in path are handled properly', function() { + const json = { '1': 1, '01': 2, '001': 3 }; const tree = nodeFromJSON(json); - expect(tree.getChild(new Path("1")).val()).to.equal(1); - expect(tree.getChild(new Path("01")).val()).to.equal(2); - expect(tree.getChild(new Path("001")).val()).to.equal(3); + expect(tree.getChild(new Path('1')).val()).to.equal(1); + expect(tree.getChild(new Path('01')).val()).to.equal(2); + expect(tree.getChild(new Path('001')).val()).to.equal(3); }); - it("Treats leading zeroes as objects, not array", function() { - const json = {"3": 1, "03": 2}; + it('Treats leading zeroes as objects, not array', function() { + const json = { '3': 1, '03': 2 }; const tree = nodeFromJSON(json); const val = tree.val(); expect(val).to.deep.equal(json); @@ -262,11 +349,13 @@ describe('Node Tests', function() { it("Updating empty children doesn't overwrite leaf node", function() { const empty = ChildrenNode.EMPTY_NODE; - const node = nodeFromJSON("value"); - expect(node).to.deep.equal(node.updateChild(new Path(".priority"), empty)); - expect(node).to.deep.equal(node.updateChild(new Path("child"), empty)); - expect(node).to.deep.equal(node.updateChild(new Path("child/.priority"), empty)); - expect(node).to.deep.equal(node.updateImmediateChild("child", empty)); - expect(node).to.deep.equal(node.updateImmediateChild(".priority", empty)); + const node = nodeFromJSON('value'); + expect(node).to.deep.equal(node.updateChild(new Path('.priority'), empty)); + expect(node).to.deep.equal(node.updateChild(new Path('child'), empty)); + expect(node).to.deep.equal( + node.updateChild(new Path('child/.priority'), empty) + ); + expect(node).to.deep.equal(node.updateImmediateChild('child', empty)); + expect(node).to.deep.equal(node.updateImmediateChild('.priority', empty)); }); }); diff --git 
a/tests/database/node/connection.test.ts b/tests/database/node/connection.test.ts index 53b875d021d..e441a2f7c4c 100644 --- a/tests/database/node/connection.test.ts +++ b/tests/database/node/connection.test.ts @@ -14,23 +14,25 @@ * limitations under the License. */ -import { expect } from "chai"; -import { TEST_PROJECT, testRepoInfo } from "../helpers/util"; -import { Connection } from "../../../src/database/realtime/Connection"; -import "../../../src/utils/nodePatches"; +import { expect } from 'chai'; +import { TEST_PROJECT, testRepoInfo } from '../helpers/util'; +import { Connection } from '../../../src/database/realtime/Connection'; +import '../../../src/utils/nodePatches'; describe('Connection', () => { it('return the session id', function(done) { - new Connection('1', - testRepoInfo(TEST_PROJECT.databaseURL), - message => {}, - (timestamp, sessionId) => { - expect(sessionId).not.to.be.null; - expect(sessionId).not.to.equal(''); - done(); - }, - () => {}, - reason => {}); + new Connection( + '1', + testRepoInfo(TEST_PROJECT.databaseURL), + message => {}, + (timestamp, sessionId) => { + expect(sessionId).not.to.be.null; + expect(sessionId).not.to.equal(''); + done(); + }, + () => {}, + reason => {} + ); }); // TODO(koss) - Flakey Test. When Dev Tools is closed on my Mac, this test @@ -38,19 +40,25 @@ describe('Connection', () => { // case a long-poll is opened first. 
it.skip('disconnect old session on new connection', function(done) { const info = testRepoInfo(TEST_PROJECT.databaseURL); - new Connection('1', info, - message => {}, - (timestamp, sessionId) => { - new Connection('2', info, - message => {}, - (timestamp, sessionId) => {}, - () => {}, - reason => {}, - sessionId); - }, - () => { - done(); // first connection was disconnected - }, - reason => {}); + new Connection( + '1', + info, + message => {}, + (timestamp, sessionId) => { + new Connection( + '2', + info, + message => {}, + (timestamp, sessionId) => {}, + () => {}, + reason => {}, + sessionId + ); + }, + () => { + done(); // first connection was disconnected + }, + reason => {} + ); }); }); diff --git a/tests/database/order.test.ts b/tests/database/order.test.ts index bdbb8606db4..ca08f8dbd0e 100644 --- a/tests/database/order.test.ts +++ b/tests/database/order.test.ts @@ -14,32 +14,30 @@ * limitations under the License. */ -import { expect } from "chai"; +import { expect } from 'chai'; import { getRandomNode } from './helpers/util'; import { Reference } from '../../src/database/api/Reference'; import { EventAccumulator } from './helpers/EventAccumulator'; -import { - eventTestHelper, -} from "./helpers/events"; +import { eventTestHelper } from './helpers/events'; -describe('Order Tests', function () { +describe('Order Tests', function() { // Kind of a hack, but a lot of these tests are written such that they'll fail if run before we're // connected to Firebase because they do a bunch of sets and then a listen and assume that they'll // arrive in that order. But if we aren't connected yet, the "reconnection" code will send them // in the opposite order. 
beforeEach(function() { return new Promise(resolve => { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; let connected = false; ref.root.child('.info/connected').on('value', function(s) { connected = s.val() == true; if (connected) resolve(); }); - }) + }); }); - it("Push a bunch of data, enumerate it back; ensure order is correct.", async function () { - const node = (getRandomNode() as Reference); + it('Push a bunch of data, enumerate it back; ensure order is correct.', async function() { + const node = getRandomNode() as Reference; for (let i = 0; i < 10; i++) { node.push().set(i); } @@ -47,15 +45,15 @@ describe('Order Tests', function () { const snap = await node.once('value'); let expected = 0; - snap.forEach(function (child) { + snap.forEach(function(child) { expect(child.val()).to.equal(expected); expected++; }); expect(expected).to.equal(10); }); - it("Push a bunch of paths, then write; ensure order is correct.", async function() { - const node = (getRandomNode() as Reference); + it('Push a bunch of paths, then write; ensure order is correct.', async function() { + const node = getRandomNode() as Reference; const paths = []; // Push them first to try to call push() multiple times in the same ms. 
for (let i = 0; i < 20; i++) { @@ -68,28 +66,30 @@ describe('Order Tests', function () { const snap = await node.once('value'); let expected = 0; - snap.forEach(function (child) { + snap.forEach(function(child) { expect(child.val()).to.equal(expected); expected++; }); expect(expected).to.equal(20); }); - it("Push a bunch of data, reconnect, read it back; ensure order is chronological.", async function () { - const nodePair = (getRandomNode(2) as Reference[]); + it('Push a bunch of data, reconnect, read it back; ensure order is chronological.', async function() { + const nodePair = getRandomNode(2) as Reference[]; let expected; const node = nodePair[0]; let nodesSet = 0; for (let i = 0; i < 10; i++) { - node.push().set(i, function() { ++nodesSet }); + node.push().set(i, function() { + ++nodesSet; + }); } // read it back locally and make sure it's correct. const snap = await node.once('value'); expected = 0; - snap.forEach(function (child) { + snap.forEach(function(child) { expect(child.val()).to.equal(expected); expected++; }); @@ -106,28 +106,30 @@ describe('Order Tests', function () { await ea.promise; expected = 0; - readSnap.forEach(function (child) { + readSnap.forEach(function(child) { expect(child.val()).to.equal(expected); expected++; }); expect(expected).to.equal(10); }); - it("Push a bunch of data with explicit priority, reconnect, read it back; ensure order is correct.", async function () { - const nodePair = (getRandomNode(2) as Reference[]); + it('Push a bunch of data with explicit priority, reconnect, read it back; ensure order is correct.', async function() { + const nodePair = getRandomNode(2) as Reference[]; let expected; const node = nodePair[0]; let nodesSet = 0; for (let i = 0; i < 10; i++) { const pushedNode = node.push(); - pushedNode.setWithPriority(i, 10 - i, function() { ++nodesSet }); + pushedNode.setWithPriority(i, 10 - i, function() { + ++nodesSet; + }); } - // read it back locally and make sure it's correct. 
+ // read it back locally and make sure it's correct. const snap = await node.once('value'); expected = 9; - snap.forEach(function (child) { + snap.forEach(function(child) { expect(child.val()).to.equal(expected); expected--; }); @@ -146,28 +148,34 @@ describe('Order Tests', function () { await ea.promise; expected = 9; - readSnap.forEach(function (child) { + readSnap.forEach(function(child) { expect(child.val()).to.equal(expected); expected--; }); expect(expected).to.equal(-1); }); - it("Push data with exponential priority and ensure order is correct.", async function () { - const nodePair = (getRandomNode(2) as Reference[]); + it('Push data with exponential priority and ensure order is correct.', async function() { + const nodePair = getRandomNode(2) as Reference[]; let expected; const node = nodePair[0]; let nodesSet = 0; for (let i = 0; i < 10; i++) { const pushedNode = node.push(); - pushedNode.setWithPriority(i, 111111111111111111111111111111 / Math.pow(10, i), function() { ++nodesSet }); + pushedNode.setWithPriority( + i, + 111111111111111111111111111111 / Math.pow(10, i), + function() { + ++nodesSet; + } + ); } // read it back locally and make sure it's correct. 
const snap = await node.once('value'); expected = 9; - snap.forEach(function (child) { + snap.forEach(function(child) { expect(child.val()).to.equal(expected); expected--; }); @@ -184,7 +192,7 @@ describe('Order Tests', function () { await ea.promise; expected = 9; - readSnap.forEach(function (child) { + readSnap.forEach(function(child) { expect(child.val()).to.equal(expected); expected--; }); @@ -192,13 +200,13 @@ describe('Order Tests', function () { }); it("Verify nodes without values aren't enumerated.", async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; node.child('foo'); node.child('bar').set('test'); let items = 0; const snap = await node.once('value'); - snap.forEach(function (child) { + snap.forEach(function(child) { items++; expect(child.key).to.equal('bar'); }); @@ -206,21 +214,21 @@ describe('Order Tests', function () { expect(items).to.equal(1); }); - it.skip("Receive child_moved event when priority changes.", async function() { - const node = (getRandomNode() as Reference); + it.skip('Receive child_moved event when priority changes.', async function() { + const node = getRandomNode() as Reference; // const ea = new EventAccumulator(() => eventHelper.watchesInitializedWaiter); const eventHelper = eventTestHelper([ - [ node, ['child_added', 'a'] ], - [ node, ['value', ''] ], - [ node, ['child_added', 'b'] ], - [ node, ['value', ''] ], - [ node, ['child_added', 'c'] ], - [ node, ['value', ''] ], - [ node, ['child_moved', 'a'] ], - [ node, ['child_changed', 'a'] ], - [ node, ['value', ''] ] + [node, ['child_added', 'a']], + [node, ['value', '']], + [node, ['child_added', 'b']], + [node, ['value', '']], + [node, ['child_added', 'c']], + [node, ['value', '']], + [node, ['child_moved', 'a']], + [node, ['child_changed', 'a']], + [node, ['value', '']] ]); // await ea.promise; @@ -236,8 +244,8 @@ describe('Order Tests', function () { expect(eventHelper.waiter()).to.equal(true); }); - it.skip("Can reset 
priority to null.", async function() { - const node = (getRandomNode() as Reference); + it.skip('Can reset priority to null.', async function() { + const node = getRandomNode() as Reference; node.child('a').setWithPriority('a', 1); node.child('b').setWithPriority('b', 2); @@ -245,17 +253,17 @@ describe('Order Tests', function () { // const ea = new EventAccumulator(() => eventHelper.waiter()); eventHelper = eventTestHelper([ - [ node, ['child_added', 'a'] ], - [ node, ['child_added', 'b'] ], - [ node, ['value', ''] ] + [node, ['child_added', 'a']], + [node, ['child_added', 'b']], + [node, ['value', '']] ]); // await ea.promise; - + eventHelper.addExpectedEvents([ - [ node, ['child_moved', 'b'] ], - [ node, ['child_changed', 'b'] ], - [ node, ['value', '']] + [node, ['child_moved', 'b']], + [node, ['child_changed', 'b']], + [node, ['value', '']] ]); node.child('b').setPriority(null); @@ -264,122 +272,138 @@ describe('Order Tests', function () { expect((await node.once('value')).child('b').getPriority()).to.equal(null); }); - it("Inserting a node under a leaf node preserves its priority.", function() { - const node = (getRandomNode() as Reference); + it('Inserting a node under a leaf node preserves its priority.', function() { + const node = getRandomNode() as Reference; let snap = null; - node.on('value', function(s) {snap = s;}); + node.on('value', function(s) { + snap = s; + }); node.setWithPriority('a', 10); node.child('deeper').set('deeper'); expect(snap.getPriority()).to.equal(10); }); - it("Verify order of mixed numbers / strings / no priorities.", async function () { - const nodePair = (getRandomNode(2) as Reference[]); + it('Verify order of mixed numbers / strings / no priorities.', async function() { + const nodePair = getRandomNode(2) as Reference[]; const nodeAndPriorities = [ - "alpha42", "zed", - "noPriorityC", null, - "num41", 500, - "noPriorityB", null, - "num80", 4000.1, - "num50", 4000, - "num10", 24, - "alpha41", "zed", - "alpha20", "horse", - 
"num20", 123, - "num70", 4000.01, - "noPriorityA", null, - "alpha30", "tree", - "num30", 300, - "num60", 4000.001, - "alpha10", "0horse", - "num42", 500, - "alpha40", "zed", - "num40", 500]; + 'alpha42', + 'zed', + 'noPriorityC', + null, + 'num41', + 500, + 'noPriorityB', + null, + 'num80', + 4000.1, + 'num50', + 4000, + 'num10', + 24, + 'alpha41', + 'zed', + 'alpha20', + 'horse', + 'num20', + 123, + 'num70', + 4000.01, + 'noPriorityA', + null, + 'alpha30', + 'tree', + 'num30', + 300, + 'num60', + 4000.001, + 'alpha10', + '0horse', + 'num42', + 500, + 'alpha40', + 'zed', + 'num40', + 500 + ]; let setsCompleted = 0; for (let i = 0; i < nodeAndPriorities.length; i++) { - const n = nodePair[0].child((nodeAndPriorities[i++] as string)); - n.setWithPriority(1, nodeAndPriorities[i], function() { setsCompleted++; }); + const n = nodePair[0].child(nodeAndPriorities[i++] as string); + n.setWithPriority(1, nodeAndPriorities[i], function() { + setsCompleted++; + }); } - const expectedOutput = "noPriorityA, noPriorityB, noPriorityC, num10, num20, num30, num40, num41, num42, num50, num60, num70, num80, alpha10, alpha20, alpha30, alpha40, alpha41, alpha42, "; + const expectedOutput = + 'noPriorityA, noPriorityB, noPriorityC, num10, num20, num30, num40, num41, num42, num50, num60, num70, num80, alpha10, alpha20, alpha30, alpha40, alpha41, alpha42, '; const snap = await nodePair[0].once('value'); - let output = ""; - snap.forEach(function (n) { - output += n.key + ", "; + let output = ''; + snap.forEach(function(n) { + output += n.key + ', '; }); expect(output).to.equal(expectedOutput); let eventsFired = false; - output = ""; + output = ''; nodePair[1].on('value', function(snap) { - snap.forEach(function (n) { - output += n.key + ", "; + snap.forEach(function(n) { + output += n.key + ', '; }); expect(output).to.equal(expectedOutput); eventsFired = true; }); }); - it("Verify order of integer keys.", async function () { - const ref = (getRandomNode() as Reference); - const keys = [ 
- "foo", - "bar", - "03", - "0", - "100", - "20", - "5", - "3", - "003", - "9" - ]; + it('Verify order of integer keys.', async function() { + const ref = getRandomNode() as Reference; + const keys = ['foo', 'bar', '03', '0', '100', '20', '5', '3', '003', '9']; let setsCompleted = 0; for (let i = 0; i < keys.length; i++) { const child = ref.child(keys[i]); - child.set(true, function() { setsCompleted++; }); + child.set(true, function() { + setsCompleted++; + }); } - const expectedOutput = "0, 3, 03, 003, 5, 9, 20, 100, bar, foo, "; + const expectedOutput = '0, 3, 03, 003, 5, 9, 20, 100, bar, foo, '; const snap = await ref.once('value'); - let output = ""; - snap.forEach(function (n) { - output += n.key + ", "; + let output = ''; + snap.forEach(function(n) { + output += n.key + ', '; }); expect(output).to.equal(expectedOutput); }); - it("Ensure prevName is correct on child_added event.", function() { - const node = (getRandomNode() as Reference); + it('Ensure prevName is correct on child_added event.', function() { + const node = getRandomNode() as Reference; let added = ''; node.on('child_added', function(snap, prevName) { - added += snap.key + " " + prevName + ", "; + added += snap.key + ' ' + prevName + ', '; }); - node.set({"a" : 1, "b": 2, "c": 3}); + node.set({ a: 1, b: 2, c: 3 }); expect(added).to.equal('a null, b a, c b, '); }); - it("Ensure prevName is correct when adding new nodes.", function() { - const node = (getRandomNode() as Reference); + it('Ensure prevName is correct when adding new nodes.', function() { + const node = getRandomNode() as Reference; let added = ''; node.on('child_added', function(snap, prevName) { - added += snap.key + " " + prevName + ", "; + added += snap.key + ' ' + prevName + ', '; }); - node.set({"b" : 2, "c": 3, "d": 4}); + node.set({ b: 2, c: 3, d: 4 }); expect(added).to.equal('b null, c b, d c, '); @@ -392,33 +416,33 @@ describe('Order Tests', function () { expect(added).to.equal('e d, '); }); - it("Ensure prevName is 
correct when adding new nodes with JSON.", function() { - const node = (getRandomNode() as Reference); + it('Ensure prevName is correct when adding new nodes with JSON.', function() { + const node = getRandomNode() as Reference; let added = ''; node.on('child_added', function(snap, prevName) { - added += snap.key + " " + prevName + ", "; + added += snap.key + ' ' + prevName + ', '; }); - node.set({"b" : 2, "c": 3, "d": 4}); + node.set({ b: 2, c: 3, d: 4 }); expect(added).to.equal('b null, c b, d c, '); added = ''; - node.set({"a": 1, "b" : 2, "c": 3, "d": 4}); + node.set({ a: 1, b: 2, c: 3, d: 4 }); expect(added).to.equal('a null, '); added = ''; - node.set({"a": 1, "b" : 2, "c": 3, "d": 4, "e": 5}); + node.set({ a: 1, b: 2, c: 3, d: 4, e: 5 }); expect(added).to.equal('e d, '); }); - it("Ensure prevName is correct when moving nodes.", function() { - const node = (getRandomNode() as Reference); + it('Ensure prevName is correct when moving nodes.', function() { + const node = getRandomNode() as Reference; let moved = ''; node.on('child_moved', function(snap, prevName) { - moved += snap.key + " " + prevName + ", "; + moved += snap.key + ' ' + prevName + ', '; }); node.child('a').setWithPriority('a', 1); @@ -438,50 +462,50 @@ describe('Order Tests', function () { expect(moved).to.equal('c d, '); }); - it("Ensure prevName is correct when moving nodes by setting whole JSON.", function() { - const node = (getRandomNode() as Reference); + it('Ensure prevName is correct when moving nodes by setting whole JSON.', function() { + const node = getRandomNode() as Reference; let moved = ''; node.on('child_moved', function(snap, prevName) { - moved += snap.key + " " + prevName + ", "; + moved += snap.key + ' ' + prevName + ', '; }); node.set({ - a: {'.value': 'a', '.priority': 1}, - b: {'.value': 'b', '.priority': 2}, - c: {'.value': 'c', '.priority': 3}, - d: {'.value': 'd', '.priority': 4} + a: { '.value': 'a', '.priority': 1 }, + b: { '.value': 'b', '.priority': 2 }, + c: { 
'.value': 'c', '.priority': 3 }, + d: { '.value': 'd', '.priority': 4 } }); node.set({ - d: {'.value': 'd', '.priority': 0}, - a: {'.value': 'a', '.priority': 1}, - b: {'.value': 'b', '.priority': 2}, - c: {'.value': 'c', '.priority': 3} + d: { '.value': 'd', '.priority': 0 }, + a: { '.value': 'a', '.priority': 1 }, + b: { '.value': 'b', '.priority': 2 }, + c: { '.value': 'c', '.priority': 3 } }); expect(moved).to.equal('d null, '); moved = ''; node.set({ - d: {'.value': 'd', '.priority': 0}, - b: {'.value': 'b', '.priority': 2}, - c: {'.value': 'c', '.priority': 3}, - a: {'.value': 'a', '.priority': 4} + d: { '.value': 'd', '.priority': 0 }, + b: { '.value': 'b', '.priority': 2 }, + c: { '.value': 'c', '.priority': 3 }, + a: { '.value': 'a', '.priority': 4 } }); expect(moved).to.equal('a c, '); moved = ''; node.set({ - d: {'.value': 'd', '.priority': 0}, - c: {'.value': 'c', '.priority': 0.5}, - b: {'.value': 'b', '.priority': 2}, - a: {'.value': 'a', '.priority': 4} + d: { '.value': 'd', '.priority': 0 }, + c: { '.value': 'c', '.priority': 0.5 }, + b: { '.value': 'b', '.priority': 2 }, + a: { '.value': 'a', '.priority': 4 } }); expect(moved).to.equal('c d, '); }); - it("Case 595: Should not get child_moved event when deleting prioritized grandchild.", function() { - const f = (getRandomNode() as Reference); + it('Case 595: Should not get child_moved event when deleting prioritized grandchild.', function() { + const f = getRandomNode() as Reference; let moves = 0; f.on('child_moved', function() { moves++; @@ -495,8 +519,8 @@ describe('Order Tests', function () { expect(moves).to.equal(0, 'Should *not* have received any move events.'); }); - it("Can set value with priority of 0.", function() { - const f = (getRandomNode() as Reference); + it('Can set value with priority of 0.', function() { + const f = getRandomNode() as Reference; let snap = null; f.on('value', function(s) { @@ -508,28 +532,30 @@ describe('Order Tests', function () { 
expect(snap.getPriority()).to.equal(0); }); - it("Can set object with priority of 0.", function() { - const f = (getRandomNode() as Reference); + it('Can set object with priority of 0.', function() { + const f = getRandomNode() as Reference; let snap = null; f.on('value', function(s) { snap = s; }); - f.setWithPriority({x: 'test', y: 7}, 0); + f.setWithPriority({ x: 'test', y: 7 }, 0); expect(snap.getPriority()).to.equal(0); }); - it("Case 2003: Should get child_moved for any priority change, regardless of whether it affects ordering.", function() { - const f = (getRandomNode() as Reference); + it('Case 2003: Should get child_moved for any priority change, regardless of whether it affects ordering.', function() { + const f = getRandomNode() as Reference; const moved = []; - f.on('child_moved', function(snap) { moved.push(snap.key); }); + f.on('child_moved', function(snap) { + moved.push(snap.key); + }); f.set({ - a: {'.value': 'a', '.priority': 0}, - b: {'.value': 'b', '.priority': 1}, - c: {'.value': 'c', '.priority': 2}, - d: {'.value': 'd', '.priority': 3} + a: { '.value': 'a', '.priority': 0 }, + b: { '.value': 'b', '.priority': 1 }, + c: { '.value': 'c', '.priority': 2 }, + d: { '.value': 'd', '.priority': 3 } }); expect(moved).to.deep.equal([]); @@ -537,23 +563,25 @@ describe('Order Tests', function () { expect(moved).to.deep.equal(['b']); }); - it("Case 2003: Should get child_moved for any priority change, regardless of whether it affects ordering (2).", function() { - const f = (getRandomNode() as Reference); + it('Case 2003: Should get child_moved for any priority change, regardless of whether it affects ordering (2).', function() { + const f = getRandomNode() as Reference; const moved = []; - f.on('child_moved', function(snap) { moved.push(snap.key); }); + f.on('child_moved', function(snap) { + moved.push(snap.key); + }); f.set({ - a: {'.value': 'a', '.priority': 0}, - b: {'.value': 'b', '.priority': 1}, - c: {'.value': 'c', '.priority': 2}, - d: 
{'.value': 'd', '.priority': 3} + a: { '.value': 'a', '.priority': 0 }, + b: { '.value': 'b', '.priority': 1 }, + c: { '.value': 'c', '.priority': 2 }, + d: { '.value': 'd', '.priority': 3 } }); expect(moved).to.deep.equal([]); f.set({ - a: {'.value': 'a', '.priority': 0}, - b: {'.value': 'b', '.priority': 1.5}, - c: {'.value': 'c', '.priority': 2}, - d: {'.value': 'd', '.priority': 3} + a: { '.value': 'a', '.priority': 0 }, + b: { '.value': 'b', '.priority': 1.5 }, + c: { '.value': 'c', '.priority': 2 }, + d: { '.value': 'd', '.priority': 3 } }); expect(moved).to.deep.equal(['b']); }); diff --git a/tests/database/order_by.test.ts b/tests/database/order_by.test.ts index 43112a6aa83..da0ec66ae4e 100644 --- a/tests/database/order_by.test.ts +++ b/tests/database/order_by.test.ts @@ -14,26 +14,25 @@ * limitations under the License. */ -import { expect } from "chai"; -import { getRandomNode } from "./helpers/util"; -import { EventAccumulatorFactory } from "./helpers/EventAccumulator"; -import { Reference } from "../../src/database/api/Reference"; +import { expect } from 'chai'; +import { getRandomNode } from './helpers/util'; +import { EventAccumulatorFactory } from './helpers/EventAccumulator'; +import { Reference } from '../../src/database/api/Reference'; describe('.orderBy tests', function() { - // TODO: setup spy on console.warn - const clearRef = (getRandomNode() as Reference); + const clearRef = getRandomNode() as Reference; it('Snapshots are iterated in order', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const initial = { - alex: {nuggets: 60}, - rob: {nuggets: 56}, - vassili: {nuggets: 55.5}, - tony: {nuggets: 52}, - greg: {nuggets: 52} + alex: { nuggets: 60 }, + rob: { nuggets: 56 }, + vassili: { nuggets: 55.5 }, + tony: { nuggets: 52 }, + greg: { nuggets: 52 } }; const expectedOrder = ['greg', 'tony', 'vassili', 'rob', 'alex']; @@ -64,7 +63,7 @@ describe('.orderBy tests', function() { }); it('Snapshots 
are iterated in order for value', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const initial = { alex: 60, @@ -102,14 +101,14 @@ describe('.orderBy tests', function() { }); it('Fires child_moved events', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const initial = { - alex: {nuggets: 60}, - rob: {nuggets: 56}, - vassili: {nuggets: 55.5}, - tony: {nuggets: 52}, - greg: {nuggets: 52} + alex: { nuggets: 60 }, + rob: { nuggets: 56 }, + vassili: { nuggets: 55.5 }, + tony: { nuggets: 52 }, + greg: { nuggets: 52 } }; const orderedRef = ref.orderByChild('nuggets'); @@ -119,7 +118,7 @@ describe('.orderBy tests', function() { moved = true; expect(snap.key).to.equal('greg'); expect(prevName).to.equal('rob'); - expect(snap.val()).to.deep.equal({nuggets: 57}); + expect(snap.val()).to.deep.equal({ nuggets: 57 }); }); ref.set(initial); @@ -128,7 +127,7 @@ describe('.orderBy tests', function() { }); it('Callback removal works', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; let reads = 0; let fooCb; @@ -177,11 +176,11 @@ describe('.orderBy tests', function() { }); it('child_added events are in the correct order', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const initial = { - a: {value: 5}, - c: {value: 3} + a: { value: 5 }, + c: { value: 3 } }; const added = []; @@ -193,15 +192,15 @@ describe('.orderBy tests', function() { expect(added).to.deep.equal(['c', 'a']); ref.update({ - b: {value: 4}, - d: {value: 2} + b: { value: 4 }, + d: { value: 2 } }); expect(added).to.deep.equal(['c', 'a', 'd', 'b']); }); it('Can use key index', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const data = { a: { '.priority': 10, '.value': 'a' }, @@ -224,7 +223,7 @@ describe('.orderBy tests', function() { 
const ea = EventAccumulatorFactory.waitsForCount(5); keys = []; - + ref.orderByKey().limitToLast(5).on('child_added', function(child) { keys.push(child.key); ea.addEvent(); @@ -237,7 +236,7 @@ describe('.orderBy tests', function() { }); it('Queries work on leaf nodes', function(done) { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; ref.set('leaf-node', function() { ref.orderByChild('foo').limitToLast(1).on('value', function(snap) { @@ -248,14 +247,14 @@ describe('.orderBy tests', function() { }); it('Updates for unindexed queries work', function(done) { - const refs = (getRandomNode(2) as Reference[]); + const refs = getRandomNode(2) as Reference[]; const reader = refs[0]; const writer = refs[1]; const value = { - 'one': { 'index': 1, 'value': 'one' }, - 'two': { 'index': 2, 'value': 'two' }, - 'three': { 'index': 3, 'value': 'three' } + one: { index: 1, value: 'one' }, + two: { index: 2, value: 'two' }, + three: { index: 3, value: 'three' } }; let count = 0; @@ -264,15 +263,15 @@ describe('.orderBy tests', function() { reader.orderByChild('index').limitToLast(2).on('value', function(snap) { if (count === 0) { expect(snap.val()).to.deep.equal({ - 'two': { 'index': 2, 'value': 'two' }, - 'three': { 'index': 3, 'value': 'three' } + two: { index: 2, value: 'two' }, + three: { index: 3, value: 'three' } }); // update child which should trigger value event writer.child('one/index').set(4); } else if (count === 1) { expect(snap.val()).to.deep.equal({ - 'three': { 'index': 3, 'value': 'three' }, - 'one': { 'index': 4, 'value': 'one' } + three: { index: 3, value: 'three' }, + one: { index: 4, value: 'one' } }); done(); } @@ -282,7 +281,7 @@ describe('.orderBy tests', function() { }); it('Server respects KeyIndex', function(done) { - const refs = (getRandomNode(2) as Reference[]); + const refs = getRandomNode(2) as Reference[]; const reader = refs[0]; const writer = refs[1]; @@ -309,7 +308,7 @@ describe('.orderBy tests', function() 
{ }); it('startAt/endAt works on value index', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const initial = { alex: 60, @@ -346,8 +345,10 @@ describe('.orderBy tests', function() { expect(addedPrevNames).to.deep.equal(expectedPrevNames); }); - it('Removing default listener removes non-default listener that loads all data', function(done) { - const ref = (getRandomNode() as Reference); + it('Removing default listener removes non-default listener that loads all data', function( + done + ) { + const ref = getRandomNode() as Reference; const initial = { key: 'value' }; ref.set(initial, function(err) { @@ -366,14 +367,14 @@ describe('.orderBy tests', function() { }); it('Can define and use an deep index', function(done) { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const initial = { - alex: {deep: {nuggets: 60}}, - rob: {deep: {nuggets: 56}}, - vassili: {deep: {nuggets: 55.5}}, - tony: {deep: {nuggets: 52}}, - greg: {deep: {nuggets: 52}} + alex: { deep: { nuggets: 60 } }, + rob: { deep: { nuggets: 56 } }, + vassili: { deep: { nuggets: 55.5 } }, + tony: { deep: { nuggets: 52 } }, + greg: { deep: { nuggets: 52 } } }; const expectedOrder = ['greg', 'tony', 'vassili']; diff --git a/tests/database/path.test.ts b/tests/database/path.test.ts index 7a5c9e06e4a..f72f93a0af0 100644 --- a/tests/database/path.test.ts +++ b/tests/database/path.test.ts @@ -14,20 +14,20 @@ * limitations under the License. 
*/ -import { expect } from "chai"; -import { Path } from "../../src/database/core/util/Path"; +import { expect } from 'chai'; +import { Path } from '../../src/database/core/util/Path'; -describe('Path Tests', function () { +describe('Path Tests', function() { const expectGreater = function(left, right) { expect(Path.comparePaths(new Path(left), new Path(right))).to.equal(1); - expect(Path.comparePaths(new Path(right), new Path(left))).to.equal(-1) + expect(Path.comparePaths(new Path(right), new Path(left))).to.equal(-1); }; const expectEqual = function(left, right) { - expect(Path.comparePaths(new Path(left), new Path(right))).to.equal(0) + expect(Path.comparePaths(new Path(left), new Path(right))).to.equal(0); }; - it('contains() contains the path and any child path.', function () { + it('contains() contains the path and any child path.', function() { expect(new Path('/').contains(new Path('/a/b/c'))).to.equal(true); expect(new Path('/a').contains(new Path('/a/b/c'))).to.equal(true); expect(new Path('/a/b').contains(new Path('/a/b/c'))).to.equal(true); @@ -37,29 +37,47 @@ describe('Path Tests', function () { expect(new Path('/a/b/c').contains(new Path('/a'))).to.equal(false); expect(new Path('/a/b/c').contains(new Path('/'))).to.equal(false); - expect(new Path('/a/b/c').popFront().contains(new Path('/b/c'))).to.equal(true); - expect(new Path('/a/b/c').popFront().contains(new Path('/b/c/d'))).to.equal(true); + expect(new Path('/a/b/c').popFront().contains(new Path('/b/c'))).to.equal( + true + ); + expect(new Path('/a/b/c').popFront().contains(new Path('/b/c/d'))).to.equal( + true + ); expect(new Path('/a/b/c').contains(new Path('/b/c'))).to.equal(false); expect(new Path('/a/b/c').contains(new Path('/a/c/b'))).to.equal(false); - expect(new Path('/a/b/c').popFront().contains(new Path('/a/b/c'))).to.equal(false); - expect(new Path('/a/b/c').popFront().contains(new Path('/b/c'))).to.equal(true); - expect(new Path('/a/b/c').popFront().contains(new 
Path('/b/c/d'))).to.equal(true); + expect(new Path('/a/b/c').popFront().contains(new Path('/a/b/c'))).to.equal( + false + ); + expect(new Path('/a/b/c').popFront().contains(new Path('/b/c'))).to.equal( + true + ); + expect(new Path('/a/b/c').popFront().contains(new Path('/b/c/d'))).to.equal( + true + ); }); it('popFront() returns the parent', function() { expect(new Path('/a/b/c').popFront().toString()).to.equal('/b/c'); expect(new Path('/a/b/c').popFront().popFront().toString()).to.equal('/c'); - expect(new Path('/a/b/c').popFront().popFront().popFront().toString()).to.equal('/'); - expect(new Path('/a/b/c').popFront().popFront().popFront().popFront().toString()).to.equal('/'); + expect( + new Path('/a/b/c').popFront().popFront().popFront().toString() + ).to.equal('/'); + expect( + new Path('/a/b/c').popFront().popFront().popFront().popFront().toString() + ).to.equal('/'); }); it('parent() returns the parent', function() { expect(new Path('/a/b/c').parent().toString()).to.equal('/a/b'); expect(new Path('/a/b/c').parent().parent().toString()).to.equal('/a'); - expect(new Path('/a/b/c').parent().parent().parent().toString()).to.equal('/'); - expect(new Path('/a/b/c').parent().parent().parent().parent()).to.equal(null); + expect(new Path('/a/b/c').parent().parent().parent().toString()).to.equal( + '/' + ); + expect(new Path('/a/b/c').parent().parent().parent().parent()).to.equal( + null + ); }); it('comparePaths() works as expected', function() { diff --git a/tests/database/promise.test.ts b/tests/database/promise.test.ts index 2ec663fd545..cd31fa0abda 100644 --- a/tests/database/promise.test.ts +++ b/tests/database/promise.test.ts @@ -14,9 +14,9 @@ * limitations under the License. 
*/ -import { expect } from "chai"; -import { getRandomNode, getRootNode } from "./helpers/util"; -import { Reference } from "../../src/database/api/Reference"; +import { expect } from 'chai'; +import { getRandomNode, getRootNode } from './helpers/util'; +import { Reference } from '../../src/database/api/Reference'; describe('Promise Tests', function() { /** @@ -31,107 +31,133 @@ describe('Promise Tests', function() { }); it('wraps Firebase.set', function() { - const ref = (getRandomNode() as Reference); - return ref.set(5).then(function() { - return ref.once('value'); - }).then(function(read) { - expect(read.val()).to.equal(5); - }); + const ref = getRandomNode() as Reference; + return ref + .set(5) + .then(function() { + return ref.once('value'); + }) + .then(function(read) { + expect(read.val()).to.equal(5); + }); }); it('wraps Firebase.push when no value is passed', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const pushed = ref.push(); - return pushed.then(function(childRef) { - expect(pushed.ref.parent.toString()).to.equal(ref.toString()); - expect(pushed.toString()).to.equal(childRef.toString()); - return pushed.once('value'); - }) - .then(function(snap) { - expect(snap.val()).to.equal(null); - expect(snap.ref.toString()).to.equal(pushed.toString()); - }); + return pushed + .then(function(childRef) { + expect(pushed.ref.parent.toString()).to.equal(ref.toString()); + expect(pushed.toString()).to.equal(childRef.toString()); + return pushed.once('value'); + }) + .then(function(snap) { + expect(snap.val()).to.equal(null); + expect(snap.ref.toString()).to.equal(pushed.toString()); + }); }); it('wraps Firebase.push when a value is passed', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const pushed = ref.push(6); - return pushed.then(function(childRef) { - expect(pushed.ref.parent.toString()).to.equal(ref.toString()); - 
expect(pushed.toString()).to.equal(childRef.toString()); - return pushed.once('value'); - }).then(function(snap) { - expect(snap.val()).to.equal(6); - expect(snap.ref.toString()).to.equal(pushed.toString()); - }); + return pushed + .then(function(childRef) { + expect(pushed.ref.parent.toString()).to.equal(ref.toString()); + expect(pushed.toString()).to.equal(childRef.toString()); + return pushed.once('value'); + }) + .then(function(snap) { + expect(snap.val()).to.equal(6); + expect(snap.ref.toString()).to.equal(pushed.toString()); + }); }); it('wraps Firebase.remove', function() { - const ref = (getRandomNode() as Reference); - return ref.set({'a': 'b'}).then(function() { - const p = ref.child('a').remove(); - expect(typeof p.then === 'function').to.equal(true); - return p; - }).then(function() { - return ref.once('value'); - }).then(function(snap) { - expect(snap.val()).to.equal(null); - }); + const ref = getRandomNode() as Reference; + return ref + .set({ a: 'b' }) + .then(function() { + const p = ref.child('a').remove(); + expect(typeof p.then === 'function').to.equal(true); + return p; + }) + .then(function() { + return ref.once('value'); + }) + .then(function(snap) { + expect(snap.val()).to.equal(null); + }); }); it('wraps Firebase.update', function() { - const ref = (getRandomNode() as Reference); - return ref.set({'a': 'b'}).then(function() { - const p = ref.update({'c': 'd'}); - expect(typeof p.then === 'function').to.equal(true); - return p; - }).then(function() { - return ref.once('value'); - }).then(function(snap) { - expect(snap.val()).to.deep.equal({'a': 'b', 'c': 'd'}); - }); + const ref = getRandomNode() as Reference; + return ref + .set({ a: 'b' }) + .then(function() { + const p = ref.update({ c: 'd' }); + expect(typeof p.then === 'function').to.equal(true); + return p; + }) + .then(function() { + return ref.once('value'); + }) + .then(function(snap) { + expect(snap.val()).to.deep.equal({ a: 'b', c: 'd' }); + }); }); it('wraps Fireabse.setPriority', 
function() { - const ref = (getRandomNode() as Reference); - return ref.set({'a': 'b'}).then(function() { - const p = ref.child('a').setPriority(5); - expect(typeof p.then === 'function').to.equal(true); - return p; - }).then(function() { - return ref.once('value'); - }).then(function(snap) { - expect(snap.child('a').getPriority()).to.equal(5); - }); + const ref = getRandomNode() as Reference; + return ref + .set({ a: 'b' }) + .then(function() { + const p = ref.child('a').setPriority(5); + expect(typeof p.then === 'function').to.equal(true); + return p; + }) + .then(function() { + return ref.once('value'); + }) + .then(function(snap) { + expect(snap.child('a').getPriority()).to.equal(5); + }); }); it('wraps Firebase.setWithPriority', function() { - const ref = (getRandomNode() as Reference); - return ref.setWithPriority('hi', 5).then(function() { - return ref.once('value'); - }).then(function(snap) { - expect(snap.getPriority()).to.equal(5); - expect(snap.val()).to.equal('hi'); - }); + const ref = getRandomNode() as Reference; + return ref + .setWithPriority('hi', 5) + .then(function() { + return ref.once('value'); + }) + .then(function(snap) { + expect(snap.getPriority()).to.equal(5); + expect(snap.val()).to.equal('hi'); + }); }); it('wraps Firebase.transaction', function() { - const ref = (getRandomNode() as Reference); - return ref.transaction(function() { - return 5; - }).then(function(result) { - expect(result.committed).to.equal(true); - expect(result.snapshot.val()).to.equal(5); - return ref.transaction(function() { return undefined; }); - }).then(function(result) { - expect(result.committed).to.equal(false); - }); + const ref = getRandomNode() as Reference; + return ref + .transaction(function() { + return 5; + }) + .then(function(result) { + expect(result.committed).to.equal(true); + expect(result.snapshot.val()).to.equal(5); + return ref.transaction(function() { + return undefined; + }); + }) + .then(function(result) { + 
expect(result.committed).to.equal(false); + }); }); it('exposes catch in the return of Firebase.push', function() { // Catch is a pain in the bum to provide safely because "catch" is a reserved word and ES3 and below require // you to use quotes to define it, but the closure linter really doesn't want you to do that either. - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const pushed = ref.push(6); expect(typeof ref.then === 'function').to.equal(false); @@ -142,7 +168,7 @@ describe('Promise Tests', function() { }); it('wraps onDisconnect.remove', function() { - const refs = (getRandomNode(2) as Reference[]); + const refs = getRandomNode(2) as Reference[]; const writer = refs[0]; const reader = refs[1]; const refInfo = getRootNode(0, '.info/connected'); @@ -151,60 +177,81 @@ describe('Promise Tests', function() { expect(snapshot.val()).to.equal(true); }); - return writer.child('here today').set('gone tomorrow').then(function() { - const p = writer.child('here today').onDisconnect().remove(); - expect(typeof p.then === 'function').to.equal(true); - return p; - }).then(function() { - writer.database.goOffline(); - writer.database.goOnline(); - return reader.once('value'); - }).then(function(snap) { - expect(snap.val()).to.equal(null); - }); + return writer + .child('here today') + .set('gone tomorrow') + .then(function() { + const p = writer.child('here today').onDisconnect().remove(); + expect(typeof p.then === 'function').to.equal(true); + return p; + }) + .then(function() { + writer.database.goOffline(); + writer.database.goOnline(); + return reader.once('value'); + }) + .then(function(snap) { + expect(snap.val()).to.equal(null); + }); }); it('wraps onDisconnect.update', function() { - const refs = (getRandomNode(2) as Reference[]); + const refs = getRandomNode(2) as Reference[]; const writer = refs[0]; const reader = refs[1]; - return writer.set({'foo': 'baz'}).then(function() { - const p = 
writer.onDisconnect().update({'foo': 'bar'}); - expect(typeof p.then === 'function').to.equal(true); - return p; - }).then(function() { - writer.database.goOffline(); - writer.database.goOnline(); - return reader.once('value'); - }).then(function(snap) { - expect(snap.val()).to.deep.equal({'foo': 'bar'}); - }); + return writer + .set({ foo: 'baz' }) + .then(function() { + const p = writer.onDisconnect().update({ foo: 'bar' }); + expect(typeof p.then === 'function').to.equal(true); + return p; + }) + .then(function() { + writer.database.goOffline(); + writer.database.goOnline(); + return reader.once('value'); + }) + .then(function(snap) { + expect(snap.val()).to.deep.equal({ foo: 'bar' }); + }); }); it('wraps onDisconnect.set', function() { - const refs = (getRandomNode(2) as Reference[]); + const refs = getRandomNode(2) as Reference[]; const writer = refs[0]; const reader = refs[1]; - return writer.child('hello').onDisconnect().set('world').then(function() { - writer.database.goOffline(); - writer.database.goOnline(); - return reader.once('value'); - }).then(function(snap) { - expect(snap.val()).to.deep.equal({'hello': 'world'}); - }); + return writer + .child('hello') + .onDisconnect() + .set('world') + .then(function() { + writer.database.goOffline(); + writer.database.goOnline(); + return reader.once('value'); + }) + .then(function(snap) { + expect(snap.val()).to.deep.equal({ hello: 'world' }); + }); }); it('wraps onDisconnect.setWithPriority', function() { - const refs = (getRandomNode(2) as Reference[]); + const refs = getRandomNode(2) as Reference[]; const writer = refs[0]; const reader = refs[1]; - return writer.child('meaning of life').onDisconnect().setWithPriority('ultimate question', 42).then(function() { - writer.database.goOffline(); - writer.database.goOnline(); - return reader.once('value'); - }).then(function(snap) { - expect(snap.val()).to.deep.equal({'meaning of life': 'ultimate question'}); - expect(snap.child('meaning of 
life').getPriority()).to.equal(42); - }); + return writer + .child('meaning of life') + .onDisconnect() + .setWithPriority('ultimate question', 42) + .then(function() { + writer.database.goOffline(); + writer.database.goOnline(); + return reader.once('value'); + }) + .then(function(snap) { + expect(snap.val()).to.deep.equal({ + 'meaning of life': 'ultimate question' + }); + expect(snap.child('meaning of life').getPriority()).to.equal(42); + }); }); }); diff --git a/tests/database/query.test.ts b/tests/database/query.test.ts index a11e688ea9b..b6c6d4ff693 100644 --- a/tests/database/query.test.ts +++ b/tests/database/query.test.ts @@ -14,19 +14,15 @@ * limitations under the License. */ -import { expect } from "chai"; -import { Reference } from "../../src/database/api/Reference"; -import { Query } from "../../src/database/api/Query"; -import "../../src/database/core/snap/ChildrenNode"; -import { - getRandomNode, - getPath, - pause -} from "./helpers/util"; +import { expect } from 'chai'; +import { Reference } from '../../src/database/api/Reference'; +import { Query } from '../../src/database/api/Query'; +import '../../src/database/core/snap/ChildrenNode'; +import { getRandomNode, getPath, pause } from './helpers/util'; import { EventAccumulator, - EventAccumulatorFactory -} from "./helpers/EventAccumulator"; + EventAccumulatorFactory +} from './helpers/EventAccumulator'; const _ = require('lodash'); @@ -46,7 +42,7 @@ describe('Query Tests', function() { }; it('Can create basic queries.', function() { - const path = (getRandomNode() as Reference); + const path = getRandomNode() as Reference; path.limitToLast(10); path.startAt('199').limitToFirst(10); @@ -59,14 +55,14 @@ describe('Query Tests', function() { path.orderByKey().startAt('foo'); path.orderByKey().endAt('foo'); path.orderByKey().equalTo('foo'); - path.orderByChild("child"); - path.orderByChild("child/deep/path"); + path.orderByChild('child'); + path.orderByChild('child/deep/path'); path.orderByValue(); 
path.orderByPriority(); }); it('Exposes database as read-only property', function() { - const path = (getRandomNode() as Reference); + const path = getRandomNode() as Reference; const child = path.child('child'); const db = path.database; @@ -77,76 +73,182 @@ describe('Query Tests', function() { * TS throws an error here (as is expected) * casting to any to allow the code to run */ - expect(() => (path as any).database = "can't overwrite").to.throw(); + expect(() => ((path as any).database = "can't overwrite")).to.throw(); expect(path.database).to.equal(db); }); it('Invalid queries throw', function() { - const path = (getRandomNode() as Reference); - + const path = getRandomNode() as Reference; + /** * Because we are testing invalid queries, I am casting * to `any` to avoid the typechecking error. This can * occur when a user uses the SDK through a pure JS * client, rather than typescript */ - expect(function() { (path as any).limitToLast(); }).to.throw(); - expect(function() { (path as any).limitToLast('100'); }).to.throw(); - expect(function() { (path as any).limitToLast({ x: 5 }); }).to.throw(); - expect(function() { path.limitToLast(100).limitToLast(100); }).to.throw(); - expect(function() { path.limitToLast(100).limitToFirst(100); }).to.throw(); - expect(function() { path.limitToLast(100).limitToLast(100); }).to.throw(); - expect(function() { path.limitToFirst(100).limitToLast(100); }).to.throw(); - expect(function() { path.limitToFirst(100).limitToFirst(100); }).to.throw(); - expect(function() { path.limitToFirst(100).limitToLast(100); }).to.throw(); - expect(function() { path.limitToLast(100).limitToLast(100); }).to.throw(); - expect(function() { path.limitToLast(100).limitToFirst(100); }).to.throw(); - expect(function() { path.limitToLast(100).limitToLast(100); }).to.throw(); - expect(function() { path.orderByPriority().orderByPriority(); }).to.throw(); - expect(function() { path.orderByPriority().orderByKey(); }).to.throw(); - expect(function() { 
path.orderByPriority().orderByChild('foo'); }).to.throw(); - expect(function() { path.orderByPriority().startAt(true); }).to.throw(); - expect(function() { path.orderByPriority().endAt(false); }).to.throw(); - expect(function() { path.orderByPriority().equalTo(true); }).to.throw(); - expect(function() { path.orderByKey().orderByPriority(); }).to.throw(); - expect(function() { path.orderByKey().orderByKey(); }).to.throw(); - expect(function() { path.orderByKey().orderByChild('foo'); }).to.throw(); - expect(function() { path.orderByChild('foo').orderByPriority(); }).to.throw(); - expect(function() { path.orderByChild('foo').orderByKey(); }).to.throw(); - expect(function() { path.orderByChild('foo').orderByChild('foo'); }).to.throw(); - expect(function() { (path as any).orderByChild('foo').startAt({a: 1}); }).to.throw(); - expect(function() { (path as any).orderByChild('foo').endAt({a: 1}); }).to.throw(); - expect(function() { (path as any).orderByChild('foo').equalTo({a: 1}); }).to.throw(); - expect(function() { path.startAt('foo').startAt('foo')}).to.throw(); - expect(function() { path.startAt('foo').equalTo('foo')}).to.throw(); - expect(function() { path.endAt('foo').endAt('foo')}).to.throw(); - expect(function() { path.endAt('foo').equalTo('foo')}).to.throw(); - expect(function() { path.equalTo('foo').startAt('foo')}).to.throw(); - expect(function() { path.equalTo('foo').endAt('foo')}).to.throw(); - expect(function() { path.equalTo('foo').equalTo('foo')}).to.throw(); - expect(function() { path.orderByKey().startAt('foo', 'foo')}).to.throw(); - expect(function() { path.orderByKey().endAt('foo', 'foo')}).to.throw(); - expect(function() { path.orderByKey().equalTo('foo', 'foo')}).to.throw(); - expect(function() { path.orderByKey().startAt(1)}).to.throw(); - expect(function() { path.orderByKey().startAt(true)}).to.throw(); - expect(function() { path.orderByKey().startAt(null)}).to.throw(); - expect(function() { path.orderByKey().endAt(1)}).to.throw(); - 
expect(function() { path.orderByKey().endAt(true)}).to.throw(); - expect(function() { path.orderByKey().endAt(null)}).to.throw(); - expect(function() { path.orderByKey().equalTo(1)}).to.throw(); - expect(function() { path.orderByKey().equalTo(true)}).to.throw(); - expect(function() { path.orderByKey().equalTo(null)}).to.throw(); - expect(function() { path.startAt('foo', 'foo').orderByKey()}).to.throw(); - expect(function() { path.endAt('foo', 'foo').orderByKey()}).to.throw(); - expect(function() { path.equalTo('foo', 'foo').orderByKey()}).to.throw(); - expect(function() { path.startAt(1).orderByKey()}).to.throw(); - expect(function() { path.startAt(true).orderByKey()}).to.throw(); - expect(function() { path.endAt(1).orderByKey()}).to.throw(); - expect(function() { path.endAt(true).orderByKey()}).to.throw(); + expect(function() { + (path as any).limitToLast(); + }).to.throw(); + expect(function() { + (path as any).limitToLast('100'); + }).to.throw(); + expect(function() { + (path as any).limitToLast({ x: 5 }); + }).to.throw(); + expect(function() { + path.limitToLast(100).limitToLast(100); + }).to.throw(); + expect(function() { + path.limitToLast(100).limitToFirst(100); + }).to.throw(); + expect(function() { + path.limitToLast(100).limitToLast(100); + }).to.throw(); + expect(function() { + path.limitToFirst(100).limitToLast(100); + }).to.throw(); + expect(function() { + path.limitToFirst(100).limitToFirst(100); + }).to.throw(); + expect(function() { + path.limitToFirst(100).limitToLast(100); + }).to.throw(); + expect(function() { + path.limitToLast(100).limitToLast(100); + }).to.throw(); + expect(function() { + path.limitToLast(100).limitToFirst(100); + }).to.throw(); + expect(function() { + path.limitToLast(100).limitToLast(100); + }).to.throw(); + expect(function() { + path.orderByPriority().orderByPriority(); + }).to.throw(); + expect(function() { + path.orderByPriority().orderByKey(); + }).to.throw(); + expect(function() { + 
path.orderByPriority().orderByChild('foo'); + }).to.throw(); + expect(function() { + path.orderByPriority().startAt(true); + }).to.throw(); + expect(function() { + path.orderByPriority().endAt(false); + }).to.throw(); + expect(function() { + path.orderByPriority().equalTo(true); + }).to.throw(); + expect(function() { + path.orderByKey().orderByPriority(); + }).to.throw(); + expect(function() { + path.orderByKey().orderByKey(); + }).to.throw(); + expect(function() { + path.orderByKey().orderByChild('foo'); + }).to.throw(); + expect(function() { + path.orderByChild('foo').orderByPriority(); + }).to.throw(); + expect(function() { + path.orderByChild('foo').orderByKey(); + }).to.throw(); + expect(function() { + path.orderByChild('foo').orderByChild('foo'); + }).to.throw(); + expect(function() { + (path as any).orderByChild('foo').startAt({ a: 1 }); + }).to.throw(); + expect(function() { + (path as any).orderByChild('foo').endAt({ a: 1 }); + }).to.throw(); + expect(function() { + (path as any).orderByChild('foo').equalTo({ a: 1 }); + }).to.throw(); + expect(function() { + path.startAt('foo').startAt('foo'); + }).to.throw(); + expect(function() { + path.startAt('foo').equalTo('foo'); + }).to.throw(); + expect(function() { + path.endAt('foo').endAt('foo'); + }).to.throw(); + expect(function() { + path.endAt('foo').equalTo('foo'); + }).to.throw(); + expect(function() { + path.equalTo('foo').startAt('foo'); + }).to.throw(); + expect(function() { + path.equalTo('foo').endAt('foo'); + }).to.throw(); + expect(function() { + path.equalTo('foo').equalTo('foo'); + }).to.throw(); + expect(function() { + path.orderByKey().startAt('foo', 'foo'); + }).to.throw(); + expect(function() { + path.orderByKey().endAt('foo', 'foo'); + }).to.throw(); + expect(function() { + path.orderByKey().equalTo('foo', 'foo'); + }).to.throw(); + expect(function() { + path.orderByKey().startAt(1); + }).to.throw(); + expect(function() { + path.orderByKey().startAt(true); + }).to.throw(); + expect(function() 
{ + path.orderByKey().startAt(null); + }).to.throw(); + expect(function() { + path.orderByKey().endAt(1); + }).to.throw(); + expect(function() { + path.orderByKey().endAt(true); + }).to.throw(); + expect(function() { + path.orderByKey().endAt(null); + }).to.throw(); + expect(function() { + path.orderByKey().equalTo(1); + }).to.throw(); + expect(function() { + path.orderByKey().equalTo(true); + }).to.throw(); + expect(function() { + path.orderByKey().equalTo(null); + }).to.throw(); + expect(function() { + path.startAt('foo', 'foo').orderByKey(); + }).to.throw(); + expect(function() { + path.endAt('foo', 'foo').orderByKey(); + }).to.throw(); + expect(function() { + path.equalTo('foo', 'foo').orderByKey(); + }).to.throw(); + expect(function() { + path.startAt(1).orderByKey(); + }).to.throw(); + expect(function() { + path.startAt(true).orderByKey(); + }).to.throw(); + expect(function() { + path.endAt(1).orderByKey(); + }).to.throw(); + expect(function() { + path.endAt(true).orderByKey(); + }).to.throw(); }); it('can produce a valid ref', function() { - const path = (getRandomNode() as Reference); + const path = getRandomNode() as Reference; const query = path.limitToLast(1); const ref = query.ref; @@ -155,59 +257,106 @@ describe('Query Tests', function() { }); it('Passing invalidKeys to startAt / endAt throws.', function() { - const f = (getRandomNode() as Reference); - const badKeys = ['.test', 'test.', 'fo$o', '[what', 'ever]', 'ha#sh', '/thing', 'th/ing', 'thing/']; + const f = getRandomNode() as Reference; + const badKeys = [ + '.test', + 'test.', + 'fo$o', + '[what', + 'ever]', + 'ha#sh', + '/thing', + 'th/ing', + 'thing/' + ]; // Changed from basic array iteration to avoid closure issues accessing mutable state _.each(badKeys, function(badKey) { - expect(function() { f.startAt(null, badKey); }).to.throw(); - expect(function() { f.endAt(null, badKey); }).to.throw(); + expect(function() { + f.startAt(null, badKey); + }).to.throw(); + expect(function() { + 
f.endAt(null, badKey); + }).to.throw(); }); }); it('Passing invalid paths to orderBy throws', function() { - const ref = (getRandomNode() as Reference); - expect(function() { ref.orderByChild('$child/foo'); }).to.throw(); - expect(function() { ref.orderByChild('$key'); }).to.throw(); - expect(function() { ref.orderByChild('$priority'); }).to.throw(); + const ref = getRandomNode() as Reference; + expect(function() { + ref.orderByChild('$child/foo'); + }).to.throw(); + expect(function() { + ref.orderByChild('$key'); + }).to.throw(); + expect(function() { + ref.orderByChild('$priority'); + }).to.throw(); }); it('Query.queryIdentifier works.', function() { - const path = (getRandomNode() as Reference); + const path = getRandomNode() as Reference; const queryId = function(query) { return query.queryIdentifier(query); }; expect(queryId(path)).to.equal('default'); - expect(queryId(path.startAt('pri', 'name'))) - .to.equal('{"sn":"name","sp":"pri"}'); - expect(queryId(path.startAt('spri').endAt('epri'))) - .to.equal('{"ep":"epri","sp":"spri"}'); - expect(queryId(path.startAt('spri', 'sname').endAt('epri', 'ename'))) - .to.equal('{"en":"ename","ep":"epri","sn":"sname","sp":"spri"}'); - expect(queryId(path.startAt('pri').limitToFirst(100))) - .to.equal('{"l":100,"sp":"pri","vf":"l"}'); - expect(queryId(path.startAt('bar').orderByChild('foo'))) - .to.equal('{"i":"foo","sp":"bar"}'); + expect(queryId(path.startAt('pri', 'name'))).to.equal( + '{"sn":"name","sp":"pri"}' + ); + expect(queryId(path.startAt('spri').endAt('epri'))).to.equal( + '{"ep":"epri","sp":"spri"}' + ); + expect( + queryId(path.startAt('spri', 'sname').endAt('epri', 'ename')) + ).to.equal('{"en":"ename","ep":"epri","sn":"sname","sp":"spri"}'); + expect(queryId(path.startAt('pri').limitToFirst(100))).to.equal( + '{"l":100,"sp":"pri","vf":"l"}' + ); + expect(queryId(path.startAt('bar').orderByChild('foo'))).to.equal( + '{"i":"foo","sp":"bar"}' + ); }); it('Passing invalid queries to isEqual throws', function() { 
- const ref = (getRandomNode() as Reference); - expect(function() { (ref as any).isEqual(); }).to.throw(); - expect(function() { (ref as any).isEqual(''); }).to.throw(); - expect(function() { (ref as any).isEqual('foo'); }).to.throw(); - expect(function() { (ref as any).isEqual({}); }).to.throw(); - expect(function() { (ref as any).isEqual([]); }).to.throw(); - expect(function() { (ref as any).isEqual(0); }).to.throw(); - expect(function() { (ref as any).isEqual(1); }).to.throw(); - expect(function() { (ref as any).isEqual(NaN); }).to.throw(); - expect(function() { ref.isEqual(null); }).to.throw(); - expect(function() { (ref as any).isEqual({a:1}); }).to.throw(); - expect(function() { (ref as any).isEqual(ref, 'extra'); }).to.throw(); + const ref = getRandomNode() as Reference; + expect(function() { + (ref as any).isEqual(); + }).to.throw(); + expect(function() { + (ref as any).isEqual(''); + }).to.throw(); + expect(function() { + (ref as any).isEqual('foo'); + }).to.throw(); + expect(function() { + (ref as any).isEqual({}); + }).to.throw(); + expect(function() { + (ref as any).isEqual([]); + }).to.throw(); + expect(function() { + (ref as any).isEqual(0); + }).to.throw(); + expect(function() { + (ref as any).isEqual(1); + }).to.throw(); + expect(function() { + (ref as any).isEqual(NaN); + }).to.throw(); + expect(function() { + ref.isEqual(null); + }).to.throw(); + expect(function() { + (ref as any).isEqual({ a: 1 }); + }).to.throw(); + expect(function() { + (ref as any).isEqual(ref, 'extra'); + }).to.throw(); }); it('Query.isEqual works.', function() { - const path = (getRandomNode() as Reference); + const path = getRandomNode() as Reference; const rootRef = path.root; const childRef = rootRef.child('child'); @@ -215,7 +364,8 @@ describe('Query Tests', function() { expect(path.isEqual(path), 'Query.isEqual - 1').to.be.true; expect(rootRef.isEqual(rootRef), 'Query.isEqual - 2').to.be.true; expect(rootRef.isEqual(childRef.parent), 'Query.isEqual - 3').to.be.true; - 
expect(rootRef.child('child').isEqual(childRef), 'Query.isEqual - 4').to.be.true; + expect(rootRef.child('child').isEqual(childRef), 'Query.isEqual - 4').to.be + .true; // Refs with different repos // var rootRefDifferentRepo = TESTS.getFreshRepo(TEST_ALT_NAMESPACE); @@ -226,108 +376,156 @@ describe('Query Tests', function() { // Refs with different paths expect(rootRef.isEqual(childRef), 'Query.isEqual - 7').to.be.false; - expect(childRef.isEqual(rootRef.child('otherChild')), 'Query.isEqual - 8').to.be.false; + expect(childRef.isEqual(rootRef.child('otherChild')), 'Query.isEqual - 8') + .to.be.false; const childQueryLast25 = childRef.limitToLast(25); const childQueryOrderedByKey = childRef.orderByKey(); const childQueryOrderedByPriority = childRef.orderByPriority(); - const childQueryOrderedByTimestamp = childRef.orderByChild("timestamp"); + const childQueryOrderedByTimestamp = childRef.orderByChild('timestamp'); const childQueryStartAt1 = childQueryOrderedByTimestamp.startAt(1); const childQueryStartAt2 = childQueryOrderedByTimestamp.startAt(2); const childQueryEndAt2 = childQueryOrderedByTimestamp.endAt(2); - const childQueryStartAt1EndAt2 = childQueryOrderedByTimestamp.startAt(1).endAt(2); + const childQueryStartAt1EndAt2 = childQueryOrderedByTimestamp + .startAt(1) + .endAt(2); // Equivalent queries - expect(childRef.isEqual(childQueryLast25.ref), 'Query.isEqual - 9').to.be.true; - expect(childQueryLast25.isEqual(childRef.limitToLast(25)), 'Query.isEqual - 10').to.be.true; - expect(childQueryStartAt1EndAt2.isEqual(childQueryOrderedByTimestamp.startAt(1).endAt(2)), 'Query.isEqual - 11').to.be.true; + expect(childRef.isEqual(childQueryLast25.ref), 'Query.isEqual - 9').to.be + .true; + expect( + childQueryLast25.isEqual(childRef.limitToLast(25)), + 'Query.isEqual - 10' + ).to.be.true; + expect( + childQueryStartAt1EndAt2.isEqual( + childQueryOrderedByTimestamp.startAt(1).endAt(2) + ), + 'Query.isEqual - 11' + ).to.be.true; // Non-equivalent queries - 
expect(childQueryLast25.isEqual(childRef), 'Query.isEqual - 12').to.be.false; - expect(childQueryLast25.isEqual(childQueryOrderedByKey), 'Query.isEqual - 13').to.be.false; - expect(childQueryLast25.isEqual(childQueryOrderedByPriority), 'Query.isEqual - 14').to.be.false; - expect(childQueryLast25.isEqual(childQueryOrderedByTimestamp), 'Query.isEqual - 15').to.be.false; - expect(childQueryOrderedByKey.isEqual(childQueryOrderedByPriority), 'Query.isEqual - 16').to.be.false; - expect(childQueryOrderedByKey.isEqual(childQueryOrderedByTimestamp), 'Query.isEqual - 17').to.be.false; - expect(childQueryStartAt1.isEqual(childQueryStartAt2), 'Query.isEqual - 18').to.be.false; - expect(childQueryStartAt1.isEqual(childQueryStartAt1EndAt2), 'Query.isEqual - 19').to.be.false; - expect(childQueryEndAt2.isEqual(childQueryStartAt2), 'Query.isEqual - 20').to.be.false; - expect(childQueryEndAt2.isEqual(childQueryStartAt1EndAt2), 'Query.isEqual - 21').to.be.false; + expect(childQueryLast25.isEqual(childRef), 'Query.isEqual - 12').to.be + .false; + expect( + childQueryLast25.isEqual(childQueryOrderedByKey), + 'Query.isEqual - 13' + ).to.be.false; + expect( + childQueryLast25.isEqual(childQueryOrderedByPriority), + 'Query.isEqual - 14' + ).to.be.false; + expect( + childQueryLast25.isEqual(childQueryOrderedByTimestamp), + 'Query.isEqual - 15' + ).to.be.false; + expect( + childQueryOrderedByKey.isEqual(childQueryOrderedByPriority), + 'Query.isEqual - 16' + ).to.be.false; + expect( + childQueryOrderedByKey.isEqual(childQueryOrderedByTimestamp), + 'Query.isEqual - 17' + ).to.be.false; + expect(childQueryStartAt1.isEqual(childQueryStartAt2), 'Query.isEqual - 18') + .to.be.false; + expect( + childQueryStartAt1.isEqual(childQueryStartAt1EndAt2), + 'Query.isEqual - 19' + ).to.be.false; + expect(childQueryEndAt2.isEqual(childQueryStartAt2), 'Query.isEqual - 20') + .to.be.false; + expect( + childQueryEndAt2.isEqual(childQueryStartAt1EndAt2), + 'Query.isEqual - 21' + ).to.be.false; }); 
it('Query.off can be called on the default query.', function() { - const path = (getRandomNode() as Reference); + const path = getRandomNode() as Reference; let eventFired = false; - const callback = function() { eventFired = true; }; + const callback = function() { + eventFired = true; + }; path.limitToLast(5).on('value', callback); - path.set({a: 5, b: 6}); + path.set({ a: 5, b: 6 }); expect(eventFired).to.be.true; eventFired = false; path.off('value', callback); - path.set({a: 6, b: 5}); + path.set({ a: 6, b: 5 }); expect(eventFired).to.be.false; }); it('Query.off can be called on the specific query.', function() { - const path = (getRandomNode() as Reference); + const path = getRandomNode() as Reference; let eventFired = false; - const callback = function() { eventFired = true; }; + const callback = function() { + eventFired = true; + }; path.limitToLast(5).on('value', callback); - path.set({a: 5, b: 6}); + path.set({ a: 5, b: 6 }); expect(eventFired).to.be.true; eventFired = false; path.limitToLast(5).off('value', callback); - path.set({a: 6, b: 5}); + path.set({ a: 6, b: 5 }); expect(eventFired).to.be.false; }); it('Query.off can be called without a callback specified.', function() { - const path = (getRandomNode() as Reference); + const path = getRandomNode() as Reference; let eventFired = false; - const callback1 = function() { eventFired = true; }; - const callback2 = function() { eventFired = true; }; + const callback1 = function() { + eventFired = true; + }; + const callback2 = function() { + eventFired = true; + }; path.on('value', callback1); path.limitToLast(5).on('value', callback2); - path.set({a: 5, b: 6}); + path.set({ a: 5, b: 6 }); expect(eventFired).to.be.true; eventFired = false; path.off('value'); - path.set({a: 6, b: 5}); + path.set({ a: 6, b: 5 }); expect(eventFired).to.be.false; }); it('Query.off can be called without an event type or callback specified.', function() { - const path = (getRandomNode() as Reference); + const path = 
getRandomNode() as Reference; let eventFired = false; - const callback1 = function() { eventFired = true; }; - const callback2 = function() { eventFired = true; }; + const callback1 = function() { + eventFired = true; + }; + const callback2 = function() { + eventFired = true; + }; path.on('value', callback1); path.limitToLast(5).on('value', callback2); - path.set({a: 5, b: 6}); + path.set({ a: 5, b: 6 }); expect(eventFired).to.be.true; eventFired = false; path.off(); - path.set({a: 6, b: 5}); + path.set({ a: 6, b: 5 }); expect(eventFired).to.be.false; }); it('Query.off respects provided context (for value events).', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const a = new EventReceiver(), - b = new EventReceiver(); + b = new EventReceiver(); ref.on('value', a.onValue, a); ref.on('value', b.onValue, b); @@ -349,10 +547,10 @@ describe('Query Tests', function() { }); it('Query.off respects provided context (for child events).', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const a = new EventReceiver(), - b = new EventReceiver(); + b = new EventReceiver(); ref.on('child_added', a.onChildAdded, a); ref.on('child_added', b.onChildAdded, b); @@ -374,10 +572,10 @@ describe('Query Tests', function() { }); it('Query.off with no callback/context removes all callbacks, even with contexts (for value events).', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const a = new EventReceiver(), - b = new EventReceiver(); + b = new EventReceiver(); ref.on('value', a.onValue, a); ref.on('value', b.onValue, b); @@ -397,10 +595,10 @@ describe('Query Tests', function() { }); it('Query.off with no callback/context removes all callbacks, even with contexts (for child events).', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const a = new EventReceiver(), - b = new 
EventReceiver(); + b = new EventReceiver(); ref.on('child_added', a.onChildAdded, a); ref.on('child_added', b.onChildAdded, b); @@ -420,10 +618,10 @@ describe('Query Tests', function() { }); it('Query.off with no event type / callback removes all callbacks (even those with contexts).', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const a = new EventReceiver(), - b = new EventReceiver(); + b = new EventReceiver(); ref.on('value', a.onValue, a); ref.on('value', b.onValue, b); @@ -450,9 +648,11 @@ describe('Query Tests', function() { }); it('Set a limit of 5, add a bunch of nodes, ensure only last 5 items are kept.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let snap = null; - node.limitToLast(5).on('value', function(s) { snap = s; }); + node.limitToLast(5).on('value', function(s) { + snap = s; + }); node.set({}); for (let i = 0; i < 10; i++) { @@ -469,7 +669,7 @@ describe('Query Tests', function() { }); it('Set a limit of 5, add a bunch of nodes, ensure only last 5 items are sent from server.', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; await node.set({}); const pushPromises = []; @@ -490,7 +690,7 @@ describe('Query Tests', function() { const [snap] = await ea.promise; let expected = 5; - + snap.forEach(function(child) { expect(child.val()).to.equal(expected); expected++; @@ -500,134 +700,147 @@ describe('Query Tests', function() { }); it('Set various limits, ensure resulting data is correct.', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - await node.set({a: 1, b: 2, c: 3}); + await node.set({ a: 1, b: 2, c: 3 }); const tasks: TaskList = [ - [node.limitToLast(1), {c: 3}], - [node.endAt().limitToLast(1), {c: 3}], - [node.limitToLast(2), {b: 2, c: 3}], - [node.limitToLast(3), {a: 1, b: 2, c: 3}], - 
[node.limitToLast(4), {a: 1, b: 2, c: 3}] + [node.limitToLast(1), { c: 3 }], + [node.endAt().limitToLast(1), { c: 3 }], + [node.limitToLast(2), { b: 2, c: 3 }], + [node.limitToLast(3), { a: 1, b: 2, c: 3 }], + [node.limitToLast(4), { a: 1, b: 2, c: 3 }] ]; - return Promise.all(tasks.map(async task => { - const [query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + return Promise.all( + tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Set various limits with a startAt name, ensure resulting data is correct.', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - await node.set({a: 1, b: 2, c: 3}); + await node.set({ a: 1, b: 2, c: 3 }); const tasks: TaskList = [ - [node.startAt().limitToFirst(1), {a: 1}], - [node.startAt(null, 'c').limitToFirst(1), {c: 3}], - [node.startAt(null, 'b').limitToFirst(1), {b: 2}], - [node.startAt(null, 'b').limitToFirst(2), {b: 2, c: 3}], - [node.startAt(null, 'b').limitToFirst(3), {b: 2, c: 3}], - [node.startAt(null, 'b').limitToLast(1), {c: 3}], - [node.startAt(null, 'b').limitToLast(1), {c: 3}], - [node.startAt(null, 'b').limitToLast(2), {b: 2, c: 3}], - [node.startAt(null, 'b').limitToLast(3), {b: 2, c: 3}], - [node.limitToFirst(1).startAt(null, 'c'), {c: 3}], - [node.limitToFirst(1).startAt(null, 'b'), {b: 2}], - [node.limitToFirst(2).startAt(null, 'b'), {b: 2, c: 3}], - [node.limitToFirst(3).startAt(null, 'b'), {b: 2, c: 3}], - [node.limitToLast(1).startAt(null, 'b'), {c: 3}], - [node.limitToLast(1).startAt(null, 'b'), {c: 3}], - [node.limitToLast(2).startAt(null, 'b'), {b: 2, c: 3}], - 
[node.limitToLast(3).startAt(null, 'b'), {b: 2, c: 3}], + [node.startAt().limitToFirst(1), { a: 1 }], + [node.startAt(null, 'c').limitToFirst(1), { c: 3 }], + [node.startAt(null, 'b').limitToFirst(1), { b: 2 }], + [node.startAt(null, 'b').limitToFirst(2), { b: 2, c: 3 }], + [node.startAt(null, 'b').limitToFirst(3), { b: 2, c: 3 }], + [node.startAt(null, 'b').limitToLast(1), { c: 3 }], + [node.startAt(null, 'b').limitToLast(1), { c: 3 }], + [node.startAt(null, 'b').limitToLast(2), { b: 2, c: 3 }], + [node.startAt(null, 'b').limitToLast(3), { b: 2, c: 3 }], + [node.limitToFirst(1).startAt(null, 'c'), { c: 3 }], + [node.limitToFirst(1).startAt(null, 'b'), { b: 2 }], + [node.limitToFirst(2).startAt(null, 'b'), { b: 2, c: 3 }], + [node.limitToFirst(3).startAt(null, 'b'), { b: 2, c: 3 }], + [node.limitToLast(1).startAt(null, 'b'), { c: 3 }], + [node.limitToLast(1).startAt(null, 'b'), { c: 3 }], + [node.limitToLast(2).startAt(null, 'b'), { b: 2, c: 3 }], + [node.limitToLast(3).startAt(null, 'b'), { b: 2, c: 3 }] ]; - return Promise.all(tasks.map(async task => { - const [query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + return Promise.all( + tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Set various limits with a endAt name, ensure resulting data is correct.', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - await node.set({a: 1, b: 2, c: 3}); + await node.set({ a: 1, b: 2, c: 3 }); const tasks: TaskList = [ - [node.endAt().limitToFirst(1), {a: 1}], - [node.endAt(null, 'c').limitToFirst(1), {a: 1}], - [node.endAt(null, 
'b').limitToFirst(1), {a: 1}], - [node.endAt(null, 'b').limitToFirst(2), {a: 1, b: 2}], - [node.endAt(null, 'b').limitToFirst(3), {a: 1, b: 2}], - [node.endAt(null, 'c').limitToLast(1), {c: 3}], - [node.endAt(null, 'b').limitToLast(1), {b: 2}], - [node.endAt(null, 'b').limitToLast(2), {a: 1, b: 2}], - [node.endAt(null, 'b').limitToLast(3), {a: 1, b: 2}], - [node.limitToFirst(1).endAt(null, 'c'), {a: 1}], - [node.limitToFirst(1).endAt(null, 'b'), {a: 1}], - [node.limitToFirst(2).endAt(null, 'b'), {a: 1, b: 2}], - [node.limitToFirst(3).endAt(null, 'b'), {a: 1, b: 2}], - [node.limitToLast(1).endAt(null, 'c'), {c: 3}], - [node.limitToLast(1).endAt(null, 'b'), {b: 2}], - [node.limitToLast(2).endAt(null, 'b'), {a: 1, b: 2}], - [node.limitToLast(3).endAt(null, 'b'), {a: 1, b: 2}], + [node.endAt().limitToFirst(1), { a: 1 }], + [node.endAt(null, 'c').limitToFirst(1), { a: 1 }], + [node.endAt(null, 'b').limitToFirst(1), { a: 1 }], + [node.endAt(null, 'b').limitToFirst(2), { a: 1, b: 2 }], + [node.endAt(null, 'b').limitToFirst(3), { a: 1, b: 2 }], + [node.endAt(null, 'c').limitToLast(1), { c: 3 }], + [node.endAt(null, 'b').limitToLast(1), { b: 2 }], + [node.endAt(null, 'b').limitToLast(2), { a: 1, b: 2 }], + [node.endAt(null, 'b').limitToLast(3), { a: 1, b: 2 }], + [node.limitToFirst(1).endAt(null, 'c'), { a: 1 }], + [node.limitToFirst(1).endAt(null, 'b'), { a: 1 }], + [node.limitToFirst(2).endAt(null, 'b'), { a: 1, b: 2 }], + [node.limitToFirst(3).endAt(null, 'b'), { a: 1, b: 2 }], + [node.limitToLast(1).endAt(null, 'c'), { c: 3 }], + [node.limitToLast(1).endAt(null, 'b'), { b: 2 }], + [node.limitToLast(2).endAt(null, 'b'), { a: 1, b: 2 }], + [node.limitToLast(3).endAt(null, 'b'), { a: 1, b: 2 }] ]; - return Promise.all(tasks.map(async task => { - const [query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + 
return Promise.all( + tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Set various limits with a startAt name, ensure resulting data is correct from the server.', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - await node.set({a: 1, b: 2, c: 3}); + await node.set({ a: 1, b: 2, c: 3 }); const tasks: TaskList = [ - [node.startAt().limitToFirst(1), {a: 1}], - [node.startAt(null, 'c').limitToFirst(1), {c: 3}], - [node.startAt(null, 'b').limitToFirst(1), {b: 2}], + [node.startAt().limitToFirst(1), { a: 1 }], + [node.startAt(null, 'c').limitToFirst(1), { c: 3 }], + [node.startAt(null, 'b').limitToFirst(1), { b: 2 }], // NOTE: technically there is a race condition here. The limitToFirst(1) query will return a single value, which will be // raised for the limitToFirst(2) callback as well, if it exists already. However, once the server gets the limitToFirst(2) // query, it will send more data and the correct state will be returned. 
- [node.startAt(null, 'b').limitToFirst(2), {b: 2, c: 3}], - [node.startAt(null, 'b').limitToFirst(3), {b: 2, c: 3}], + [node.startAt(null, 'b').limitToFirst(2), { b: 2, c: 3 }], + [node.startAt(null, 'b').limitToFirst(3), { b: 2, c: 3 }] ]; - return Promise.all(tasks.map(async task => { - const [query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + return Promise.all( + tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Set limit, ensure child_removed and child_added events are fired when limit is hit.', function() { - const node = (getRandomNode() as Reference); - let added = '', removed = ''; - node.limitToLast(2).on('child_added', function(snap) { added += snap.key + ' '}); - node.limitToLast(2).on('child_removed', function(snap) { removed += snap.key + ' '}); - node.set({a: 1, b: 2, c: 3}); + const node = getRandomNode() as Reference; + let added = '', + removed = ''; + node.limitToLast(2).on('child_added', function(snap) { + added += snap.key + ' '; + }); + node.limitToLast(2).on('child_removed', function(snap) { + removed += snap.key + ' '; + }); + node.set({ a: 1, b: 2, c: 3 }); expect(added).to.equal('b c '); expect(removed).to.equal(''); @@ -639,19 +852,20 @@ describe('Query Tests', function() { }); it('Set limit, ensure child_removed and child_added events are fired when limit is hit, using server data', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - await node.set({a: 1, b: 2, c: 3}); + await node.set({ a: 1, b: 2, c: 3 }); const ea = EventAccumulatorFactory.waitsForCount(2); - let added = '', removed = ''; - 
node.limitToLast(2).on('child_added', function(snap) { - added += snap.key + ' '; + let added = '', + removed = ''; + node.limitToLast(2).on('child_added', function(snap) { + added += snap.key + ' '; ea.addEvent(); }); - node.limitToLast(2).on('child_removed', function(snap) { - removed += snap.key + ' ' + node.limitToLast(2).on('child_removed', function(snap) { + removed += snap.key + ' '; }); await ea.promise; @@ -667,12 +881,17 @@ describe('Query Tests', function() { }); it('Set start and limit, ensure child_removed and child_added events are fired when limit is hit.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - let added = '', removed = ''; - node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { added += snap.key + ' '}); - node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { removed += snap.key + ' '}); - node.set({a: 1, b: 2, c: 3}); + let added = '', + removed = ''; + node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { + added += snap.key + ' '; + }); + node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { + removed += snap.key + ' '; + }); + node.set({ a: 1, b: 2, c: 3 }); expect(added).to.equal('a b '); expect(removed).to.equal(''); @@ -685,18 +904,19 @@ describe('Query Tests', function() { it('Set start and limit, ensure child_removed and child_added events are fired when limit is hit, using server data', async function() { const node = getRandomNode(); - await node.set({a: 1, b: 2, c: 3}); + await node.set({ a: 1, b: 2, c: 3 }); const ea = EventAccumulatorFactory.waitsForCount(2); - let added = '', removed = ''; - node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { - added += snap.key + ' '; + let added = '', + removed = ''; + node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { + added += snap.key + ' '; ea.addEvent(); }); - node.startAt(null, 
'a').limitToFirst(2).on('child_removed', function(snap) { - removed += snap.key + ' ' + node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { + removed += snap.key + ' '; }); - + await ea.promise; expect(added).to.equal('a b '); @@ -710,12 +930,17 @@ describe('Query Tests', function() { }); it("Set start and limit, ensure child_added events are fired when limit isn't hit yet.", function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - let added = '', removed = ''; - node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { added += snap.key + ' '}); - node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { removed += snap.key + ' '}); - node.set({c: 3}); + let added = '', + removed = ''; + node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { + added += snap.key + ' '; + }); + node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { + removed += snap.key + ' '; + }); + node.set({ c: 3 }); expect(added).to.equal('c '); expect(removed).to.equal(''); @@ -726,20 +951,20 @@ describe('Query Tests', function() { }); it("Set start and limit, ensure child_added events are fired when limit isn't hit yet, using server data", async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - await node.set({c: 3}); + await node.set({ c: 3 }); const ea = EventAccumulatorFactory.waitsForCount(1); let added = ''; let removed = ''; - node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { + node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { added += snap.key + ' '; ea.addEvent(); }); - node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { - removed += snap.key + ' ' + node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { + removed += snap.key + ' '; }); await ea.promise; @@ -755,39 +980,43 @@ 
describe('Query Tests', function() { }); it('Set a limit, ensure child_removed and child_added events are fired when limit is satisfied and you remove an item.', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; const ea = EventAccumulatorFactory.waitsForCount(1); - let added = '', removed = ''; - node.limitToLast(2).on('child_added', function(snap) { + let added = '', + removed = ''; + node.limitToLast(2).on('child_added', function(snap) { added += snap.key + ' '; ea.addEvent(); }); - node.limitToLast(2).on('child_removed', function(snap) { removed += snap.key + ' '}); - node.set({a: 1, b: 2, c: 3}); + node.limitToLast(2).on('child_removed', function(snap) { + removed += snap.key + ' '; + }); + node.set({ a: 1, b: 2, c: 3 }); expect(added).to.equal('b c '); expect(removed).to.equal(''); added = ''; node.child('b').remove(); expect(removed).to.equal('b '); - + await ea.promise; }); it('Set a limit, ensure child_removed and child_added events are fired when limit is satisfied and you remove an item. 
Using server data', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - await node.set({a: 1, b: 2, c: 3}); + await node.set({ a: 1, b: 2, c: 3 }); let ea = EventAccumulatorFactory.waitsForCount(2); - let added = '', removed = ''; - node.limitToLast(2).on('child_added', function(snap) { - added += snap.key + ' '; + let added = '', + removed = ''; + node.limitToLast(2).on('child_added', function(snap) { + added += snap.key + ' '; ea.addEvent(); }); - node.limitToLast(2).on('child_removed', function(snap) { - removed += snap.key + ' ' + node.limitToLast(2).on('child_removed', function(snap) { + removed += snap.key + ' '; }); await ea.promise; @@ -796,7 +1025,7 @@ describe('Query Tests', function() { expect(removed).to.equal(''); // We are going to wait for one more event before closing - ea = EventAccumulatorFactory.waitsForCount(1); + ea = EventAccumulatorFactory.waitsForCount(1); added = ''; await node.child('b').remove(); @@ -807,12 +1036,17 @@ describe('Query Tests', function() { }); it('Set a limit, ensure child_removed events are fired when limit is satisfied, you remove an item, and there are no more.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - let added = '', removed = ''; - node.limitToLast(2).on('child_added', function(snap) { added += snap.key + ' '}); - node.limitToLast(2).on('child_removed', function(snap) { removed += snap.key + ' '}); - node.set({b: 2, c: 3}); + let added = '', + removed = ''; + node.limitToLast(2).on('child_added', function(snap) { + added += snap.key + ' '; + }); + node.limitToLast(2).on('child_removed', function(snap) { + removed += snap.key + ' '; + }); + node.set({ b: 2, c: 3 }); expect(added).to.equal('b c '); expect(removed).to.equal(''); @@ -825,18 +1059,18 @@ describe('Query Tests', function() { }); it('Set a limit, ensure child_removed events are fired when limit is satisfied, you remove an item, and 
there are no more. Using server data', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; const ea = EventAccumulatorFactory.waitsForCount(2); let added = ''; let removed = ''; - await node.set({b: 2, c: 3}); + await node.set({ b: 2, c: 3 }); - node.limitToLast(2).on('child_added', function(snap) { + node.limitToLast(2).on('child_added', function(snap) { added += snap.key + ' '; ea.addEvent(); }); - node.limitToLast(2).on('child_removed', function(snap) { - removed += snap.key + ' ' + node.limitToLast(2).on('child_removed', function(snap) { + removed += snap.key + ' '; }); await ea.promise; @@ -845,7 +1079,7 @@ describe('Query Tests', function() { expect(removed).to.equal(''); added = ''; - + await node.child('b').remove(); expect(added).to.equal(''); @@ -853,167 +1087,179 @@ describe('Query Tests', function() { }); it('Ensure startAt / endAt with priority works.', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; const tasks: TaskList = [ - [node.startAt('w').endAt('y'), {b: 2, c: 3, d: 4}], - [node.startAt('w').endAt('w'), {d: 4 }], - [node.startAt('a').endAt('c'), null], + [node.startAt('w').endAt('y'), { b: 2, c: 3, d: 4 }], + [node.startAt('w').endAt('w'), { d: 4 }], + [node.startAt('a').endAt('c'), null] ]; await node.set({ - a: {'.value': 1, '.priority': 'z'}, - b: {'.value': 2, '.priority': 'y'}, - c: {'.value': 3, '.priority': 'x'}, - d: {'.value': 4, '.priority': 'w'} - }); - - return Promise.all(tasks.map(async task => { - const [query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + a: { '.value': 1, '.priority': 'z' }, + b: { '.value': 2, '.priority': 'y' }, + c: { '.value': 3, '.priority': 'x' }, + d: { '.value': 4, '.priority': 'w' } + }); + + return Promise.all( + 
tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Ensure startAt / endAt with priority work with server data.', async function() { - const node = (getRandomNode() as Reference); - + const node = getRandomNode() as Reference; + await node.set({ - a: {'.value': 1, '.priority': 'z'}, - b: {'.value': 2, '.priority': 'y'}, - c: {'.value': 3, '.priority': 'x'}, - d: {'.value': 4, '.priority': 'w'} + a: { '.value': 1, '.priority': 'z' }, + b: { '.value': 2, '.priority': 'y' }, + c: { '.value': 3, '.priority': 'x' }, + d: { '.value': 4, '.priority': 'w' } }); const tasks: TaskList = [ - [node.startAt('w').endAt('y'), {b: 2, c: 3, d: 4}], - [node.startAt('w').endAt('w'), {d: 4 }], - [node.startAt('a').endAt('c'), null], + [node.startAt('w').endAt('y'), { b: 2, c: 3, d: 4 }], + [node.startAt('w').endAt('w'), { d: 4 }], + [node.startAt('a').endAt('c'), null] ]; - return Promise.all(tasks.map(async task => { - const [query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + return Promise.all( + tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Ensure startAt / endAt with priority and name works.', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; await node.set({ - a: {'.value': 1, '.priority': 1}, - b: {'.value': 2, '.priority': 1}, - c: {'.value': 3, '.priority': 2}, - d: {'.value': 4, '.priority': 2} + a: { '.value': 1, 
'.priority': 1 }, + b: { '.value': 2, '.priority': 1 }, + c: { '.value': 3, '.priority': 2 }, + d: { '.value': 4, '.priority': 2 } }); const tasks: TaskList = [ - [node.startAt(1, 'a').endAt(2, 'd'), {a: 1, b: 2, c: 3, d: 4}], - [node.startAt(1, 'b').endAt(2, 'c'), {b: 2, c: 3}], - [node.startAt(1, 'c').endAt(2), {c: 3, d: 4}], + [node.startAt(1, 'a').endAt(2, 'd'), { a: 1, b: 2, c: 3, d: 4 }], + [node.startAt(1, 'b').endAt(2, 'c'), { b: 2, c: 3 }], + [node.startAt(1, 'c').endAt(2), { c: 3, d: 4 }] ]; - return Promise.all(tasks.map(async task => { - const [query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + return Promise.all( + tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Ensure startAt / endAt with priority and name work with server data', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; await node.set({ - a: {'.value': 1, '.priority': 1}, - b: {'.value': 2, '.priority': 1}, - c: {'.value': 3, '.priority': 2}, - d: {'.value': 4, '.priority': 2} + a: { '.value': 1, '.priority': 1 }, + b: { '.value': 2, '.priority': 1 }, + c: { '.value': 3, '.priority': 2 }, + d: { '.value': 4, '.priority': 2 } }); const tasks: TaskList = [ - [node.startAt(1, 'a').endAt(2, 'd'), {a: 1, b: 2, c: 3, d: 4}], - [node.startAt(1, 'b').endAt(2, 'c'), {b: 2, c: 3}], - [node.startAt(1, 'c').endAt(2), {c: 3, d: 4}], + [node.startAt(1, 'a').endAt(2, 'd'), { a: 1, b: 2, c: 3, d: 4 }], + [node.startAt(1, 'b').endAt(2, 'c'), { b: 2, c: 3 }], + [node.startAt(1, 'c').endAt(2), { c: 3, d: 4 }] ]; - return Promise.all(tasks.map(async task => { - const 
[query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + return Promise.all( + tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Ensure startAt / endAt with priority and name works (2).', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; const tasks: TaskList = [ - [node.startAt(1, 'c').endAt(2, 'b'), {a: 1, b: 2, c: 3, d: 4}], - [node.startAt(1, 'd').endAt(2, 'a'), {d: 4, a: 1}], - [node.startAt(1, 'e').endAt(2), {a: 1, b: 2}], + [node.startAt(1, 'c').endAt(2, 'b'), { a: 1, b: 2, c: 3, d: 4 }], + [node.startAt(1, 'd').endAt(2, 'a'), { d: 4, a: 1 }], + [node.startAt(1, 'e').endAt(2), { a: 1, b: 2 }] ]; node.set({ - c: {'.value': 3, '.priority': 1}, - d: {'.value': 4, '.priority': 1}, - a: {'.value': 1, '.priority': 2}, - b: {'.value': 2, '.priority': 2} - }); - - return Promise.all(tasks.map(async task => { - const [query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + c: { '.value': 3, '.priority': 1 }, + d: { '.value': 4, '.priority': 1 }, + a: { '.value': 1, '.priority': 2 }, + b: { '.value': 2, '.priority': 2 } + }); + + return Promise.all( + tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Ensure startAt / endAt with priority and name works (2). 
With server data', async function() { - const node = (getRandomNode() as Reference); - + const node = getRandomNode() as Reference; + await node.set({ - c: {'.value': 3, '.priority': 1}, - d: {'.value': 4, '.priority': 1}, - a: {'.value': 1, '.priority': 2}, - b: {'.value': 2, '.priority': 2} + c: { '.value': 3, '.priority': 1 }, + d: { '.value': 4, '.priority': 1 }, + a: { '.value': 1, '.priority': 2 }, + b: { '.value': 2, '.priority': 2 } }); const tasks: TaskList = [ - [node.startAt(1, 'c').endAt(2, 'b'), {a: 1, b: 2, c: 3, d: 4}], - [node.startAt(1, 'd').endAt(2, 'a'), {d: 4, a: 1}], - [node.startAt(1, 'e').endAt(2), {a: 1, b: 2}], + [node.startAt(1, 'c').endAt(2, 'b'), { a: 1, b: 2, c: 3, d: 4 }], + [node.startAt(1, 'd').endAt(2, 'a'), { d: 4, a: 1 }], + [node.startAt(1, 'e').endAt(2), { a: 1, b: 2 }] ]; - - return Promise.all(tasks.map(async task => { - const [query, val] = task; - const ea = EventAccumulatorFactory.waitsForCount(1); - query.on('value', snap => { - ea.addEvent(snap.val()); - }); - const [newVal] = await ea.promise; - expect(newVal).to.deep.equal(val); - })); + + return Promise.all( + tasks.map(async task => { + const [query, val] = task; + const ea = EventAccumulatorFactory.waitsForCount(1); + query.on('value', snap => { + ea.addEvent(snap.val()); + }); + const [newVal] = await ea.promise; + expect(newVal).to.deep.equal(val); + }) + ); }); it('Set a limit, add some nodes, ensure prevName works correctly.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let added = ''; node.limitToLast(2).on('child_added', function(snap, prevName) { @@ -1037,11 +1283,11 @@ describe('Query Tests', function() { }); it('Set a limit, add some nodes, ensure prevName works correctly. 
With server data', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let added = ''; await node.child('a').set(1); - + const ea = EventAccumulatorFactory.waitsForCount(1); node.limitToLast(2).on('child_added', function(snap, prevName) { added += snap.key + ' ' + prevName + ', '; @@ -1069,7 +1315,7 @@ describe('Query Tests', function() { }); it('Set a limit, move some nodes, ensure prevName works correctly.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let moved = ''; node.limitToLast(2).on('child_moved', function(snap, prevName) { moved += snap.key + ' ' + prevName + ', '; @@ -1093,7 +1339,7 @@ describe('Query Tests', function() { }); it('Set a limit, move some nodes, ensure prevName works correctly, with server data', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let moved = ''; node.child('a').setWithPriority('a', 10); @@ -1113,16 +1359,16 @@ describe('Query Tests', function() { moved = ''; await node.child('c').setPriority(35); - + expect(moved).to.equal('c null, '); moved = ''; await node.child('b').setPriority(33); - + expect(moved).to.equal(''); }); it('Numeric priorities: Set a limit, move some nodes, ensure prevName works correctly.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let moved = ''; node.limitToLast(2).on('child_moved', function(snap, prevName) { @@ -1139,7 +1385,7 @@ describe('Query Tests', function() { }); it('Numeric priorities: Set a limit, move some nodes, ensure prevName works correctly. 
With server data', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let moved = ''; node.child('a').setWithPriority('a', 1); @@ -1154,12 +1400,12 @@ describe('Query Tests', function() { await node.limitToLast(2).once('value'); await node.child('c').setPriority(10); - + expect(moved).to.equal('c d, '); }); it('Set a limit, add a bunch of nodes, ensure local events are correct.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; node.set({}); let eventHistory = ''; @@ -1175,7 +1421,9 @@ describe('Query Tests', function() { n.set(i); } - expect(eventHistory).to.equal('0 added, 1 added, 0 removed, 2 added, 1 removed, 3 added, 2 removed, 4 added, '); + expect(eventHistory).to.equal( + '0 added, 1 added, 0 removed, 2 added, 1 removed, 3 added, 2 removed, 4 added, ' + ); }); it('Set a limit, add a bunch of nodes, ensure remote events are correct.', async function() { @@ -1186,7 +1434,7 @@ describe('Query Tests', function() { try { expect(eventHistory).to.equal('3 added, 4 added, '); return true; - } catch(err) { + } catch (err) { return false; } }); @@ -1216,21 +1464,21 @@ describe('Query Tests', function() { }); it('Ensure on() returns callback function.', function() { - const node = (getRandomNode() as Reference); - const callback = function() { }; + const node = getRandomNode() as Reference; + const callback = function() {}; const ret = node.on('value', callback); expect(ret).to.equal(callback); }); it("Limit on unsynced node fires 'value'.", function(done) { - const f = (getRandomNode() as Reference); + const f = getRandomNode() as Reference; f.limitToLast(1).on('value', function() { done(); }); }); it('Filtering to only null priorities works.', async function() { - const f = (getRandomNode() as Reference); + const f = getRandomNode() as Reference; const ea = EventAccumulatorFactory.waitsForCount(1); f.root.child('.info/connected').on('value', 
function(snap) { @@ -1240,11 +1488,11 @@ describe('Query Tests', function() { await ea.promise; f.set({ - a: {'.priority': null, '.value': 0}, - b: {'.priority': null, '.value': 1}, - c: {'.priority': '2', '.value': 2}, - d: {'.priority': 3, '.value': 3}, - e: {'.priority': 'hi', '.value': 4} + a: { '.priority': null, '.value': 0 }, + b: { '.priority': null, '.value': 1 }, + c: { '.priority': '2', '.value': 2 }, + d: { '.priority': 3, '.value': 3 }, + e: { '.priority': 'hi', '.value': 4 } }); const snapAcc = EventAccumulatorFactory.waitsForCount(1); @@ -1253,48 +1501,48 @@ describe('Query Tests', function() { }); const [val] = await snapAcc.promise; - expect(val).to.deep.equal({a: 0, b: 1}); + expect(val).to.deep.equal({ a: 0, b: 1 }); }); it('null priorities included in endAt(2).', async function() { - const f = (getRandomNode() as Reference); - + const f = getRandomNode() as Reference; + f.set({ - a: {'.priority': null, '.value': 0}, - b: {'.priority': null, '.value': 1}, - c: {'.priority': 2, '.value': 2}, - d: {'.priority': 3, '.value': 3}, - e: {'.priority': 'hi', '.value': 4} + a: { '.priority': null, '.value': 0 }, + b: { '.priority': null, '.value': 1 }, + c: { '.priority': 2, '.value': 2 }, + d: { '.priority': 3, '.value': 3 }, + e: { '.priority': 'hi', '.value': 4 } }); const ea = EventAccumulatorFactory.waitsForCount(1); f.endAt(2).on('value', snap => { ea.addEvent(snap.val()); }); - + const [val] = await ea.promise; - expect(val).to.deep.equal({a: 0, b: 1, c: 2}); + expect(val).to.deep.equal({ a: 0, b: 1, c: 2 }); }); it('null priorities not included in startAt(2).', async function() { - const f = (getRandomNode() as Reference); - + const f = getRandomNode() as Reference; + f.set({ - a: {'.priority': null, '.value': 0}, - b: {'.priority': null, '.value': 1}, - c: {'.priority': 2, '.value': 2}, - d: {'.priority': 3, '.value': 3}, - e: {'.priority': 'hi', '.value': 4} + a: { '.priority': null, '.value': 0 }, + b: { '.priority': null, '.value': 1 }, + c: { 
'.priority': 2, '.value': 2 }, + d: { '.priority': 3, '.value': 3 }, + e: { '.priority': 'hi', '.value': 4 } }); const ea = EventAccumulatorFactory.waitsForCount(1); - + f.startAt(2).on('value', snap => { ea.addEvent(snap.val()); }); const [val] = await ea.promise; - expect(val).to.deep.equal({c: 2, d: 3, e: 4}); + expect(val).to.deep.equal({ c: 2, d: 3, e: 4 }); }); function dumpListens(node: Query) { @@ -1310,14 +1558,15 @@ describe('Query Tests', function() { listenPaths.sort(); const dumpPieces = []; for (let i = 0; i < listenPaths.length; i++) { - const queryIds = []; for (let queryId in listens[listenPaths[i]]) { queryIds.push(queryId); } queryIds.sort(); if (queryIds.length > 0) { - dumpPieces.push(listenPaths[i].substring(nodePath.length) + ':' + queryIds.join(',')); + dumpPieces.push( + listenPaths[i].substring(nodePath.length) + ':' + queryIds.join(',') + ); } } @@ -1325,10 +1574,10 @@ describe('Query Tests', function() { } it('Dedupe listens: listen on parent.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; expect(dumpListens(node)).to.equal(''); - const aOn = node.child('a').on('value', function() { }); + const aOn = node.child('a').on('value', function() {}); expect(dumpListens(node)).to.equal('/a:default'); const rootOn = node.on('value', function() {}); @@ -1342,12 +1591,12 @@ describe('Query Tests', function() { }); it('Dedupe listens: listen on grandchild.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; const rootOn = node.on('value', function() {}); expect(dumpListens(node)).to.equal(':default'); - const aaOn = node.child('a/aa').on('value', function() { }); + const aaOn = node.child('a/aa').on('value', function() {}); expect(dumpListens(node)).to.equal(':default'); node.off('value', rootOn); @@ -1356,13 +1605,13 @@ describe('Query Tests', function() { }); it('Dedupe listens: listen on grandparent of two children.', function() { - 
const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; expect(dumpListens(node)).to.equal(''); - const aaOn = node.child('a/aa').on('value', function() { }); + const aaOn = node.child('a/aa').on('value', function() {}); expect(dumpListens(node)).to.equal('/a/aa:default'); - const bbOn = node.child('a/bb').on('value', function() { }); + const bbOn = node.child('a/bb').on('value', function() {}); expect(dumpListens(node)).to.equal('/a/aa:default;/a/bb:default'); const rootOn = node.on('value', function() {}); @@ -1379,17 +1628,19 @@ describe('Query Tests', function() { }); it('Dedupe queried listens: multiple queried listens; no dupes', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; expect(dumpListens(node)).to.equal(''); - const aLim1On = node.child('a').limitToLast(1).on('value', function() { }); + const aLim1On = node.child('a').limitToLast(1).on('value', function() {}); expect(dumpListens(node)).to.equal('/a:{"l":1,"vf":"r"}'); - const rootLim1On = node.limitToLast(1).on('value', function() { }); + const rootLim1On = node.limitToLast(1).on('value', function() {}); expect(dumpListens(node)).to.equal(':{"l":1,"vf":"r"};/a:{"l":1,"vf":"r"}'); - const aLim5On = node.child('a').limitToLast(5).on('value', function() { }); - expect(dumpListens(node)).to.equal(':{"l":1,"vf":"r"};/a:{"l":1,"vf":"r"},{"l":5,"vf":"r"}'); + const aLim5On = node.child('a').limitToLast(5).on('value', function() {}); + expect(dumpListens(node)).to.equal( + ':{"l":1,"vf":"r"};/a:{"l":1,"vf":"r"},{"l":5,"vf":"r"}' + ); node.limitToLast(1).off('value', rootLim1On); expect(dumpListens(node)).to.equal('/a:{"l":1,"vf":"r"},{"l":5,"vf":"r"}'); @@ -1400,15 +1651,17 @@ describe('Query Tests', function() { }); it('Dedupe queried listens: listen on parent of queried children.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - const aLim1On = 
node.child('a').limitToLast(1).on('value', function() { }); + const aLim1On = node.child('a').limitToLast(1).on('value', function() {}); expect(dumpListens(node)).to.equal('/a:{"l":1,"vf":"r"}'); - const bLim1On = node.child('b').limitToLast(1).on('value', function() { }); - expect(dumpListens(node)).to.equal('/a:{"l":1,"vf":"r"};/b:{"l":1,"vf":"r"}'); + const bLim1On = node.child('b').limitToLast(1).on('value', function() {}); + expect(dumpListens(node)).to.equal( + '/a:{"l":1,"vf":"r"};/b:{"l":1,"vf":"r"}' + ); - const rootOn = node.on('value', function() { }); + const rootOn = node.on('value', function() {}); expect(dumpListens(node)).to.equal(':default'); // remove in slightly random order. @@ -1423,7 +1676,7 @@ describe('Query Tests', function() { }); it('Limit with mix of null and non-null priorities.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; const children = []; node.limitToLast(5).on('child_added', function(childSnap) { @@ -1431,12 +1684,12 @@ describe('Query Tests', function() { }); node.set({ - 'Vikrum': {'.priority': 1000, 'score': 1000, 'name': 'Vikrum'}, - 'Mike': {'.priority': 500, 'score': 500, 'name': 'Mike'}, - 'Andrew': {'.priority': 50, 'score': 50, 'name': 'Andrew'}, - 'James': {'.priority': 7, 'score': 7, 'name': 'James'}, - 'Sally': {'.priority': -7, 'score': -7, 'name': 'Sally'}, - 'Fred': {'score': 0, 'name': 'Fred'} + Vikrum: { '.priority': 1000, score: 1000, name: 'Vikrum' }, + Mike: { '.priority': 500, score: 500, name: 'Mike' }, + Andrew: { '.priority': 50, score: 50, name: 'Andrew' }, + James: { '.priority': 7, score: 7, name: 'James' }, + Sally: { '.priority': -7, score: -7, name: 'Sally' }, + Fred: { score: 0, name: 'Fred' } }); expect(children.join(',')).to.equal('Sally,James,Andrew,Mike,Vikrum'); @@ -1447,12 +1700,12 @@ describe('Query Tests', function() { const children = []; await node.set({ - 'Vikrum': {'.priority': 1000, 'score': 1000, 'name': 'Vikrum'}, - 'Mike': 
{'.priority': 500, 'score': 500, 'name': 'Mike'}, - 'Andrew': {'.priority': 50, 'score': 50, 'name': 'Andrew'}, - 'James': {'.priority': 7, 'score': 7, 'name': 'James'}, - 'Sally': {'.priority': -7, 'score': -7, 'name': 'Sally'}, - 'Fred': {'score': 0, 'name': 'Fred'} + Vikrum: { '.priority': 1000, score: 1000, name: 'Vikrum' }, + Mike: { '.priority': 500, score: 500, name: 'Mike' }, + Andrew: { '.priority': 50, score: 50, name: 'Andrew' }, + James: { '.priority': 7, score: 7, name: 'James' }, + Sally: { '.priority': -7, score: -7, name: 'Sally' }, + Fred: { score: 0, name: 'Fred' } }); const ea = EventAccumulatorFactory.waitsForCount(5); @@ -1467,9 +1720,11 @@ describe('Query Tests', function() { }); it('.on() with a context works.', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; - const ListenerDoohickey = function() { this.snap = null; }; + const ListenerDoohickey = function() { + this.snap = null; + }; ListenerDoohickey.prototype.onEvent = function(snap) { this.snap = snap; }; @@ -1488,9 +1743,11 @@ describe('Query Tests', function() { }); it('.once() with a context works.', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; - const ListenerDoohickey = function() { this.snap = null; }; + const ListenerDoohickey = function() { + this.snap = null; + }; ListenerDoohickey.prototype.onEvent = function(snap) { this.snap = snap; }; @@ -1507,7 +1764,7 @@ describe('Query Tests', function() { }); it('handles an update that deletes the entire window in a query', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const snaps = []; ref.limitToLast(2).on('value', function(snap) { @@ -1515,9 +1772,9 @@ describe('Query Tests', function() { }); ref.set({ - a: {'.value': 1, '.priority': 1}, - b: {'.value': 2, '.priority': 2}, - c: {'.value': 3, '.priority': 3} + a: { '.value': 1, '.priority': 1 }, + b: { '.value': 2, 
'.priority': 2 }, + c: { '.value': 3, '.priority': 3 } }); ref.update({ b: null, @@ -1525,13 +1782,13 @@ describe('Query Tests', function() { }); expect(snaps.length).to.equal(2); - expect(snaps[0]).to.deep.equal({b: 2, c: 3}); + expect(snaps[0]).to.deep.equal({ b: 2, c: 3 }); // The original set is still outstanding (synchronous API), so we have a full cache to re-window against - expect(snaps[1]).to.deep.equal({a: 1}); + expect(snaps[1]).to.deep.equal({ a: 1 }); }); it('handles an out-of-view query on a child', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; let parent = null; ref.limitToLast(1).on('value', function(snap) { @@ -1543,17 +1800,17 @@ describe('Query Tests', function() { child = snap.val(); }); - ref.set({a: 1, b: 2}); - expect(parent).to.deep.equal({b: 2}); + ref.set({ a: 1, b: 2 }); + expect(parent).to.deep.equal({ b: 2 }); expect(child).to.equal(1); - ref.update({c: 3}); - expect(parent).to.deep.equal({c: 3}); + ref.update({ c: 3 }); + expect(parent).to.deep.equal({ c: 3 }); expect(child).to.equal(1); }); it('handles a child query going out of view of the parent', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; let parent = null; ref.limitToLast(1).on('value', function(snap) { @@ -1565,19 +1822,19 @@ describe('Query Tests', function() { child = snap.val(); }); - ref.set({a: 1}); - expect(parent).to.deep.equal({a: 1}); + ref.set({ a: 1 }); + expect(parent).to.deep.equal({ a: 1 }); expect(child).to.equal(1); ref.child('b').set(2); - expect(parent).to.deep.equal({b: 2}); + expect(parent).to.deep.equal({ b: 2 }); expect(child).to.equal(1); ref.child('b').remove(); - expect(parent).to.deep.equal({a: 1}); + expect(parent).to.deep.equal({ a: 1 }); expect(child).to.equal(1); }); it('handles diverging views', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; let c = null; 
ref.limitToLast(1).endAt(null, 'c').on('value', function(snap) { @@ -1589,16 +1846,16 @@ describe('Query Tests', function() { d = snap.val(); }); - ref.set({a: 1, b: 2, c: 3}); - expect(c).to.deep.equal({c: 3}); - expect(d).to.deep.equal({c: 3}); + ref.set({ a: 1, b: 2, c: 3 }); + expect(c).to.deep.equal({ c: 3 }); + expect(d).to.deep.equal({ c: 3 }); ref.child('d').set(4); - expect(c).to.deep.equal({c: 3}); - expect(d).to.deep.equal({d: 4}); + expect(c).to.deep.equal({ c: 3 }); + expect(d).to.deep.equal({ d: 4 }); }); it('handles removing a queried element', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; let val; const ea = EventAccumulatorFactory.waitsForCount(1); @@ -1607,7 +1864,7 @@ describe('Query Tests', function() { ea.addEvent(); }); - ref.set({a: 1, b: 2}); + ref.set({ a: 1, b: 2 }); expect(val).to.equal(2); ref.child('b').remove(); @@ -1618,9 +1875,9 @@ describe('Query Tests', function() { }); it('.startAt().limitToFirst(1) works.', function(done) { - const ref = (getRandomNode() as Reference); - ref.set({a: 1, b: 2}); - + const ref = getRandomNode() as Reference; + ref.set({ a: 1, b: 2 }); + let val; ref.startAt().limitToFirst(1).on('child_added', function(snap) { val = snap.val(); @@ -1631,8 +1888,8 @@ describe('Query Tests', function() { }); it('.startAt().limitToFirst(1) and then remove first child (case 1664).', async function() { - const ref = (getRandomNode() as Reference); - ref.set({a: 1, b: 2}); + const ref = getRandomNode() as Reference; + ref.set({ a: 1, b: 2 }); const ea = EventAccumulatorFactory.waitsForCount(1); let val; @@ -1651,24 +1908,28 @@ describe('Query Tests', function() { expect(val).to.equal(2); }); - it('.startAt() with two arguments works properly (case 1169).', function(done) { - const ref = (getRandomNode() as Reference); - const data = { - 'Walker': { - name: 'Walker', - score: 20, - '.priority': 20 - }, - 'Michael': { - name: 'Michael', - score: 100, - 
'.priority': 100 - } + it('.startAt() with two arguments works properly (case 1169).', function( + done + ) { + const ref = getRandomNode() as Reference; + const data = { + Walker: { + name: 'Walker', + score: 20, + '.priority': 20 + }, + Michael: { + name: 'Michael', + score: 100, + '.priority': 100 + } }; ref.set(data, function() { ref.startAt(20, 'Walker').limitToFirst(2).on('value', function(s) { const childNames = []; - s.forEach(function(node) { childNames.push(node.key); }); + s.forEach(function(node) { + childNames.push(node.key); + }); expect(childNames).to.deep.equal(['Walker', 'Michael']); done(); }); @@ -1676,7 +1937,7 @@ describe('Query Tests', function() { }); it('handles multiple queries on the same node', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; await ref.set({ a: 1, @@ -1703,11 +1964,11 @@ describe('Query Tests', function() { await ref.limitToLast(1).once('value'); const snap = await ref.limitToLast(1).once('value'); const val = snap.val(); - expect(val).to.deep.equal({f: 6}); + expect(val).to.deep.equal({ f: 6 }); }); it('handles once called on a node with a default listener', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; await ref.set({ a: 1, @@ -1732,7 +1993,6 @@ describe('Query Tests', function() { expect(val).to.equal(6); }); - it('handles once called on a node with a default listener and non-complete limit', async function() { const ref = getRandomNode(); @@ -1753,65 +2013,99 @@ describe('Query Tests', function() { // now do the once call const snap = await ref.limitToLast(5).once('value'); const val = snap.val(); - expect(val).to.deep.equal({a: 1, b: 2, c: 3}); + expect(val).to.deep.equal({ a: 1, b: 2, c: 3 }); }); it('Remote remove triggers events.', function(done) { - const refPair = getRandomNode(2), writeRef = refPair[0], readRef = refPair[1]; + const refPair = getRandomNode(2), + writeRef = refPair[0], + 
readRef = refPair[1]; writeRef.set({ a: 'a', b: 'b', c: 'c', d: 'd', e: 'e' }, function() { - // Wait to get the initial data, and then remove 'c' remotely and wait for new data. let count = 0; readRef.limitToLast(5).on('value', function(s) { count++; if (count == 1) { - expect(s.val()).to.deep.equal({a: 'a', b: 'b', c: 'c', d: 'd', e: 'e' }); + expect(s.val()).to.deep.equal({ + a: 'a', + b: 'b', + c: 'c', + d: 'd', + e: 'e' + }); writeRef.child('c').remove(); } else { expect(count).to.equal(2); - expect(s.val()).to.deep.equal({a: 'a', b: 'b', d: 'd', e: 'e' }); + expect(s.val()).to.deep.equal({ a: 'a', b: 'b', d: 'd', e: 'e' }); done(); } }); }); }); - it(".endAt(null, 'f').limitToLast(5) returns the right set of children.", function(done) { - const ref = (getRandomNode() as Reference); - ref.set({ a: 'a', b: 'b', c: 'c', d: 'd', e: 'e', f: 'f', g: 'g', h: 'h' }, function() { - ref.endAt(null, 'f').limitToLast(5).on('value', function(s) { - expect(s.val()).to.deep.equal({b: 'b', c: 'c', d: 'd', e: 'e', f: 'f' }); - done(); - }); - }); + it(".endAt(null, 'f').limitToLast(5) returns the right set of children.", function( + done + ) { + const ref = getRandomNode() as Reference; + ref.set( + { a: 'a', b: 'b', c: 'c', d: 'd', e: 'e', f: 'f', g: 'g', h: 'h' }, + function() { + ref.endAt(null, 'f').limitToLast(5).on('value', function(s) { + expect(s.val()).to.deep.equal({ + b: 'b', + c: 'c', + d: 'd', + e: 'e', + f: 'f' + }); + done(); + }); + } + ); }); - it('complex update() at query root raises correct value event', function(done) { + it('complex update() at query root raises correct value event', function( + done + ) { const nodePair = getRandomNode(2); const writer = nodePair[0]; const reader = nodePair[1]; - let readerLoaded = false, numEventsReceived = 0; - writer.child('foo').set({a: 1, b: 2, c: 3, d: 4, e: 5}, function(error, dummy) { - reader.child('foo').startAt().limitToFirst(4).on('value', function(snapshot) { - const val = snapshot.val(); - if 
(!readerLoaded) { - readerLoaded = true; - expect(val).to.deep.equal({a: 1, b: 2, c: 3, d: 4}); - - // This update causes the following to happen: - // 1. An in-view child is set to null (b) - // 2. An in-view child has its value changed (c) - // 3. An in-view child is changed and bumped out-of-view (d) - // We expect to get null values for b and d, along with the new children and updated value for c - writer.child('foo').update({b: null, c: 'a', cc: 'new', cd: 'new2', d: 'gone'}); - } else { - done(); - expect(val).to.deep.equal({a: 1, c: 'a', cc: 'new', cd: 'new2'}); - } + let readerLoaded = false, + numEventsReceived = 0; + writer + .child('foo') + .set({ a: 1, b: 2, c: 3, d: 4, e: 5 }, function(error, dummy) { + reader + .child('foo') + .startAt() + .limitToFirst(4) + .on('value', function(snapshot) { + const val = snapshot.val(); + if (!readerLoaded) { + readerLoaded = true; + expect(val).to.deep.equal({ a: 1, b: 2, c: 3, d: 4 }); + + // This update causes the following to happen: + // 1. An in-view child is set to null (b) + // 2. An in-view child has its value changed (c) + // 3. 
An in-view child is changed and bumped out-of-view (d) + // We expect to get null values for b and d, along with the new children and updated value for c + writer + .child('foo') + .update({ b: null, c: 'a', cc: 'new', cd: 'new2', d: 'gone' }); + } else { + done(); + expect(val).to.deep.equal({ + a: 1, + c: 'a', + cc: 'new', + cd: 'new2' + }); + } + }); }); - }); }); it('update() at query root raises correct value event', function(done) { @@ -1819,25 +2113,28 @@ describe('Query Tests', function() { const writer = nodePair[0]; const reader = nodePair[1]; - let readerLoaded = false, numEventsReceived = 0; - writer.child('foo').set({ 'bar': 'a', 'baz': 'b', 'bam': 'c' }, function(error, dummy) { - reader.child('foo').limitToLast(10).on('value', function(snapshot) { - const val = snapshot.val(); - if (!readerLoaded) { - readerLoaded = true; - expect(val.bar).to.equal('a'); - expect(val.baz).to.equal('b'); - expect(val.bam).to.equal('c'); - writer.child('foo').update({ 'bar': 'd', 'bam': null, 'bat': 'e' }); - } else { - expect(val.bar).to.equal('d'); - expect(val.baz).to.equal('b'); - expect(val.bat).to.equal('e'); - expect(val.bam).to.equal(undefined); - done(); - } + let readerLoaded = false, + numEventsReceived = 0; + writer + .child('foo') + .set({ bar: 'a', baz: 'b', bam: 'c' }, function(error, dummy) { + reader.child('foo').limitToLast(10).on('value', function(snapshot) { + const val = snapshot.val(); + if (!readerLoaded) { + readerLoaded = true; + expect(val.bar).to.equal('a'); + expect(val.baz).to.equal('b'); + expect(val.bam).to.equal('c'); + writer.child('foo').update({ bar: 'd', bam: null, bat: 'e' }); + } else { + expect(val.bar).to.equal('d'); + expect(val.baz).to.equal('b'); + expect(val.bat).to.equal('e'); + expect(val.bam).to.equal(undefined); + done(); + } + }); }); - }); }); it('set() at query root raises correct value event', function(done) { @@ -1845,72 +2142,90 @@ describe('Query Tests', function() { const writer = nodePair[0]; const reader = 
nodePair[1]; - let readerLoaded = false, numEventsReceived = 0; - writer.child('foo').set({ 'bar': 'a', 'baz': 'b', 'bam': 'c' }, function(error, dummy) { - reader.child('foo').limitToLast(10).on('value', function(snapshot) { - const val = snapshot.val(); - if (!readerLoaded) { - readerLoaded = true; - expect(val.bar).to.equal('a'); - expect(val.baz).to.equal('b'); - expect(val.bam).to.equal('c'); - writer.child('foo').set({ 'bar': 'd', 'baz': 'b', 'bat': 'e' }); - } else { - expect(val.bar).to.equal('d'); - expect(val.baz).to.equal('b'); - expect(val.bat).to.equal('e'); - expect(val.bam).to.equal(undefined); - done(); - } + let readerLoaded = false, + numEventsReceived = 0; + writer + .child('foo') + .set({ bar: 'a', baz: 'b', bam: 'c' }, function(error, dummy) { + reader.child('foo').limitToLast(10).on('value', function(snapshot) { + const val = snapshot.val(); + if (!readerLoaded) { + readerLoaded = true; + expect(val.bar).to.equal('a'); + expect(val.baz).to.equal('b'); + expect(val.bam).to.equal('c'); + writer.child('foo').set({ bar: 'd', baz: 'b', bat: 'e' }); + } else { + expect(val.bar).to.equal('d'); + expect(val.baz).to.equal('b'); + expect(val.bat).to.equal('e'); + expect(val.bam).to.equal(undefined); + done(); + } + }); }); - }); }); - - it('listen for child_added events with limit and different types fires properly', function(done) { + it('listen for child_added events with limit and different types fires properly', function( + done + ) { const nodePair = getRandomNode(2); const writer = nodePair[0]; const reader = nodePair[1]; - let numEventsReceived = 0, gotA = false, gotB = false, gotC = false; + let numEventsReceived = 0, + gotA = false, + gotB = false, + gotC = false; writer.child('a').set(1, function(error, dummy) { writer.child('b').set('b', function(error, dummy) { - writer.child('c').set({ 'deep': 'path', 'of': { 'stuff': true }}, function(error, dummy) { - reader.limitToLast(3).on('child_added', function(snap) { - const val = snap.val(); - 
switch (snap.key) { - case 'a': - gotA = true; - expect(val).to.equal(1); - break; - case 'b': - gotB = true; - expect(val).to.equal('b'); - break; - case 'c': - gotC = true; - expect(val.deep).to.equal('path'); - expect(val.of.stuff).to.be.true; - break; - default: - expect(false).to.be.true; - } - numEventsReceived += 1; - expect(numEventsReceived).to.be.lessThan(4); - if (gotA && gotB && gotC) done(); + writer + .child('c') + .set({ deep: 'path', of: { stuff: true } }, function(error, dummy) { + reader.limitToLast(3).on('child_added', function(snap) { + const val = snap.val(); + switch (snap.key) { + case 'a': + gotA = true; + expect(val).to.equal(1); + break; + case 'b': + gotB = true; + expect(val).to.equal('b'); + break; + case 'c': + gotC = true; + expect(val.deep).to.equal('path'); + expect(val.of.stuff).to.be.true; + break; + default: + expect(false).to.be.true; + } + numEventsReceived += 1; + expect(numEventsReceived).to.be.lessThan(4); + if (gotA && gotB && gotC) done(); + }); }); - }); }); }); }); - it('listen for child_changed events with limit and different types fires properly', function(done) { + it('listen for child_changed events with limit and different types fires properly', function( + done + ) { const nodePair = getRandomNode(2); const writer = nodePair[0]; const reader = nodePair[1]; - let numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; - writer.set({ a: 'something', b: "we'll", c: 'overwrite '}, function(error, dummy) { + let numEventsReceived = 0, + gotA = false, + gotB = false, + gotC = false, + readerLoaded = false; + writer.set({ a: 'something', b: "we'll", c: 'overwrite ' }, function( + error, + dummy + ) { reader.limitToLast(3).on('value', function(snapshot) { if (!readerLoaded) { readerLoaded = true; @@ -1942,55 +2257,64 @@ describe('Query Tests', function() { // Begin changing every key writer.child('a').set(1); writer.child('b').set('b'); - writer.child('c').set({ 'deep': 'path', 'of': { 
'stuff': true }}); + writer.child('c').set({ deep: 'path', of: { stuff: true } }); } }); }); }); - it('listen for child_remove events with limit and different types fires properly', function(done) { + it('listen for child_remove events with limit and different types fires properly', function( + done + ) { const nodePair = getRandomNode(2); const writer = nodePair[0]; const reader = nodePair[1]; - let numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; - writer.set({ a: 1, b: 'b', c: { 'deep': 'path', 'of': { 'stuff': true }} }, function(error, dummy) { - reader.limitToLast(3).on('value', function(snapshot) { - if (!readerLoaded) { - readerLoaded = true; - - // Set up listener for upcoming change events - reader.limitToLast(3).on('child_removed', function(snap) { - const val = snap.val(); - switch (snap.key) { - case 'a': - gotA = true; - expect(val).to.equal(1); - break; - case 'b': - gotB = true; - expect(val).to.equal('b'); - break; - case 'c': - gotC = true; - expect(val.deep).to.equal('path'); - expect(val.of.stuff).to.be.true; - break; - default: - expect(false).to.be.true; - } - numEventsReceived += 1; - expect(numEventsReceived).to.be.lessThan(4); - if (gotA && gotB && gotC) done(); - }); + let numEventsReceived = 0, + gotA = false, + gotB = false, + gotC = false, + readerLoaded = false; + writer.set( + { a: 1, b: 'b', c: { deep: 'path', of: { stuff: true } } }, + function(error, dummy) { + reader.limitToLast(3).on('value', function(snapshot) { + if (!readerLoaded) { + readerLoaded = true; + + // Set up listener for upcoming change events + reader.limitToLast(3).on('child_removed', function(snap) { + const val = snap.val(); + switch (snap.key) { + case 'a': + gotA = true; + expect(val).to.equal(1); + break; + case 'b': + gotB = true; + expect(val).to.equal('b'); + break; + case 'c': + gotC = true; + expect(val.deep).to.equal('path'); + expect(val.of.stuff).to.be.true; + break; + default: + expect(false).to.be.true; + } + 
numEventsReceived += 1; + expect(numEventsReceived).to.be.lessThan(4); + if (gotA && gotB && gotC) done(); + }); - // Begin removing every key - writer.child('a').remove(); - writer.child('b').remove(); - writer.child('c').remove(); - } - }); - }); + // Begin removing every key + writer.child('a').remove(); + writer.child('b').remove(); + writer.child('c').remove(); + } + }); + } + ); }); it('listen for child_remove events when parent removed', function(done) { @@ -1998,97 +2322,110 @@ describe('Query Tests', function() { const writer = nodePair[0]; const reader = nodePair[1]; - let numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; - writer.set({ a: 1, b: 'b', c: { 'deep': 'path', 'of': { 'stuff': true }} }, function(error, dummy) { - - reader.limitToLast(3).on('value', function(snapshot) { - if (!readerLoaded) { - readerLoaded = true; - - // Set up listener for upcoming change events - reader.limitToLast(3).on('child_removed', function(snap) { - const val = snap.val(); - switch (snap.key) { - case 'a': - gotA = true; - expect(val).to.equal(1); - break; - case 'b': - gotB = true; - expect(val).to.equal('b'); - break; - case 'c': - gotC = true; - expect(val.deep).to.equal('path'); - expect(val.of.stuff).to.be.true; - break; - default: - expect(false).to.be.true; - } - numEventsReceived += 1; - expect(numEventsReceived).to.be.lessThan(4); - if (gotA && gotB && gotC) done(); - }); + let numEventsReceived = 0, + gotA = false, + gotB = false, + gotC = false, + readerLoaded = false; + writer.set( + { a: 1, b: 'b', c: { deep: 'path', of: { stuff: true } } }, + function(error, dummy) { + reader.limitToLast(3).on('value', function(snapshot) { + if (!readerLoaded) { + readerLoaded = true; + + // Set up listener for upcoming change events + reader.limitToLast(3).on('child_removed', function(snap) { + const val = snap.val(); + switch (snap.key) { + case 'a': + gotA = true; + expect(val).to.equal(1); + break; + case 'b': + gotB = true; + 
expect(val).to.equal('b'); + break; + case 'c': + gotC = true; + expect(val.deep).to.equal('path'); + expect(val.of.stuff).to.be.true; + break; + default: + expect(false).to.be.true; + } + numEventsReceived += 1; + expect(numEventsReceived).to.be.lessThan(4); + if (gotA && gotB && gotC) done(); + }); - // Remove the query parent - writer.remove(); - } - }); - }); + // Remove the query parent + writer.remove(); + } + }); + } + ); }); - it('listen for child_remove events when parent set to scalar', function(done) { + it('listen for child_remove events when parent set to scalar', function( + done + ) { const nodePair = getRandomNode(2); const writer = nodePair[0]; const reader = nodePair[1]; - let numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; - writer.set({ a: 1, b: 'b', c: { 'deep': 'path', 'of': { 'stuff': true }} }, function(error, dummy) { - - reader.limitToLast(3).on('value', function(snapshot) { - if (!readerLoaded) { - readerLoaded = true; - - // Set up listener for upcoming change events - reader.limitToLast(3).on('child_removed', function(snap) { - const val = snap.val(); - switch (snap.key) { - case 'a': - gotA = true; - expect(val).to.equal(1); - break; - case 'b': - gotB = true; - expect(val).to.equal('b'); - break; - case 'c': - gotC = true; - expect(val.deep).to.equal('path'); - expect(val.of.stuff).to.be.true; - break; - default: - expect(false).to.be.true; - } - numEventsReceived += 1; - expect(numEventsReceived).to.be.lessThan(4); - if (gotA && gotB && gotC) done(); - }); + let numEventsReceived = 0, + gotA = false, + gotB = false, + gotC = false, + readerLoaded = false; + writer.set( + { a: 1, b: 'b', c: { deep: 'path', of: { stuff: true } } }, + function(error, dummy) { + reader.limitToLast(3).on('value', function(snapshot) { + if (!readerLoaded) { + readerLoaded = true; + + // Set up listener for upcoming change events + reader.limitToLast(3).on('child_removed', function(snap) { + const val = snap.val(); + 
switch (snap.key) { + case 'a': + gotA = true; + expect(val).to.equal(1); + break; + case 'b': + gotB = true; + expect(val).to.equal('b'); + break; + case 'c': + gotC = true; + expect(val.deep).to.equal('path'); + expect(val.of.stuff).to.be.true; + break; + default: + expect(false).to.be.true; + } + numEventsReceived += 1; + expect(numEventsReceived).to.be.lessThan(4); + if (gotA && gotB && gotC) done(); + }); - // Set the parent to a scalar - writer.set('scalar'); - } - }); - }); + // Set the parent to a scalar + writer.set('scalar'); + } + }); + } + ); }); - it('Queries behave wrong after .once().', async function() { const refPair = getRandomNode(2), - writeRef = refPair[0], - readRef = refPair[1]; + writeRef = refPair[0], + readRef = refPair[1]; let startAtCount, defaultCount; - await writeRef.set({a: 1, b: 2, c: 3, d: 4 }); + await writeRef.set({ a: 1, b: 2, c: 3, d: 4 }); await readRef.once('value'); @@ -2115,12 +2452,23 @@ describe('Query Tests', function() { }); it('Case 2003: Correctly get events for startAt/endAt queries when priority changes.', function() { - const ref = (getRandomNode() as Reference); - const addedFirst = [], removedFirst = [], addedSecond = [], removedSecond = []; - ref.startAt(0).endAt(10).on('child_added', function(snap) { addedFirst.push(snap.key); }); - ref.startAt(0).endAt(10).on('child_removed', function(snap) { removedFirst.push(snap.key); }); - ref.startAt(10).endAt(20).on('child_added', function(snap) { addedSecond.push(snap.key); }); - ref.startAt(10).endAt(20).on('child_removed', function(snap) { removedSecond.push(snap.key); }); + const ref = getRandomNode() as Reference; + const addedFirst = [], + removedFirst = [], + addedSecond = [], + removedSecond = []; + ref.startAt(0).endAt(10).on('child_added', function(snap) { + addedFirst.push(snap.key); + }); + ref.startAt(0).endAt(10).on('child_removed', function(snap) { + removedFirst.push(snap.key); + }); + ref.startAt(10).endAt(20).on('child_added', function(snap) { + 
addedSecond.push(snap.key); + }); + ref.startAt(10).endAt(20).on('child_removed', function(snap) { + removedSecond.push(snap.key); + }); ref.child('a').setWithPriority('a', 5); expect(addedFirst).to.deep.equal(['a']); @@ -2141,7 +2489,7 @@ describe('Query Tests', function() { const reader = refs[1]; await writer.set({ - a: {b: 1, c: 2}, + a: { b: 1, c: 2 }, e: 3 }); @@ -2165,9 +2513,9 @@ describe('Query Tests', function() { const val = snap.val(); count++; if (count == 1) { - expect(val).to.deep.equal({a: {b: 1, c: 2}, e: 3}); + expect(val).to.deep.equal({ a: { b: 1, c: 2 }, e: 3 }); } else if (count == 2) { - expect(val).to.deep.equal({d: 4, e: 3}); + expect(val).to.deep.equal({ d: 4, e: 3 }); } }); @@ -2180,7 +2528,9 @@ describe('Query Tests', function() { }); it('Priority-only updates are processed correctly by server.', async function() { - const refPair = (getRandomNode(2) as Reference[]), readRef = refPair[0], writeRef = refPair[1]; + const refPair = getRandomNode(2) as Reference[], + readRef = refPair[0], + writeRef = refPair[1]; const ea = EventAccumulatorFactory.waitsForCount(1); let readVal; @@ -2190,10 +2540,10 @@ describe('Query Tests', function() { ea.addEvent(); } }); - writeRef.set({ - a: { '.priority': 10, '.value': 1}, - b: { '.priority': 20, '.value': 2}, - c: { '.priority': 30, '.value': 3} + writeRef.set({ + a: { '.priority': 10, '.value': 1 }, + b: { '.priority': 20, '.value': 2 }, + c: { '.priority': 30, '.value': 3 } }); await ea.promise; @@ -2207,7 +2557,9 @@ describe('Query Tests', function() { }); it('Server: Test re-listen', function(done) { - const refPair = (getRandomNode(2) as Reference[]), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2) as Reference[], + ref = refPair[0], + ref2 = refPair[1]; ref.set({ a: 'a', b: 'b', @@ -2232,7 +2584,9 @@ describe('Query Tests', function() { }); it('Server: Test re-listen 2', function(done) { - const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const 
refPair = getRandomNode(2), + ref = refPair[0], + ref2 = refPair[1]; ref.set({ a: 'a', b: 'b', @@ -2248,16 +2602,20 @@ describe('Query Tests', function() { before = b.val(); }); - ref.child('aa').update({ 'a': 5, 'aa': 4, 'b': 7, 'c': 4, 'd': 4, 'dd': 3 }, function() { - ref2.startAt(null, 'b').limitToFirst(3).on('value', function(b) { - expect(b.val()).to.deep.equal(before); - done(); + ref + .child('aa') + .update({ a: 5, aa: 4, b: 7, c: 4, d: 4, dd: 3 }, function() { + ref2.startAt(null, 'b').limitToFirst(3).on('value', function(b) { + expect(b.val()).to.deep.equal(before); + done(); + }); }); - }); }); it('Server: Test re-listen 3', function(done) { - const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2), + ref = refPair[0], + ref2 = refPair[1]; ref.set({ a: 'a', b: 'b', @@ -2283,19 +2641,19 @@ describe('Query Tests', function() { it('Server limit below limit works properly.', async function() { const refPair = getRandomNode(2), - readRef = refPair[0], - writeRef = refPair[1]; + readRef = refPair[0], + writeRef = refPair[1]; let childData; await writeRef.set({ a: { - aa: {'.priority': 1, '.value': 1 }, - ab: {'.priority': 1, '.value': 1 } - } + aa: { '.priority': 1, '.value': 1 }, + ab: { '.priority': 1, '.value': 1 } + } }); readRef.limitToLast(1).on('value', function(s) { - expect(s.val()).to.deep.equal({a: { aa: 1, ab: 1}}); + expect(s.val()).to.deep.equal({ a: { aa: 1, ab: 1 } }); }); const ea = EventAccumulatorFactory.waitsForCount(1); @@ -2315,15 +2673,19 @@ describe('Query Tests', function() { await ea.promise; - expect(childData).to.deep.equal({ aa: 1 }); + expect(childData).to.deep.equal({ aa: 1 }); }); it('Server: Setting grandchild of item in limit works.', async function() { - const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2), + ref = refPair[0], + ref2 = refPair[1]; - ref.set({ a: { - name: 'Mike' - }}); + ref.set({ + a: { + name: 'Mike' + } 
+ }); const ea = EventAccumulatorFactory.waitsForCount(1); const snaps = []; @@ -2336,21 +2698,28 @@ describe('Query Tests', function() { }); await ea.promise; - expect(snaps).to.deep.equal( [{ a: { name: 'Mike' } }]); + expect(snaps).to.deep.equal([{ a: { name: 'Mike' } }]); ea.reset(); ref.child('a/name').set('Fred'); await ea.promise; - expect(snaps).to.deep.equal([{ a: { name: 'Mike' } }, { a: { name: 'Fred' } }]); + expect(snaps).to.deep.equal([ + { a: { name: 'Mike' } }, + { a: { name: 'Fred' } } + ]); }); it('Server: Updating grandchildren of item in limit works.', async function() { - const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2), + ref = refPair[0], + ref2 = refPair[1]; - ref.set({ a: { - name: 'Mike' - }}); + ref.set({ + a: { + name: 'Mike' + } + }); const ea = EventAccumulatorFactory.waitsForCount(1); const snaps = []; @@ -2373,11 +2742,16 @@ describe('Query Tests', function() { ref.child('a').update({ name: null, Name: 'Fred' }); await ea.promise; - expect(snaps).to.deep.equal([{ a: { name: 'Mike' } }, { a: { Name: 'Fred' } }]); + expect(snaps).to.deep.equal([ + { a: { name: 'Mike' } }, + { a: { Name: 'Fred' } } + ]); }); it('Server: New child at end of limit shows up.', async function() { - const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2), + ref = refPair[0], + ref2 = refPair[1]; const ea = EventAccumulatorFactory.waitsForCount(1); let snap; @@ -2401,7 +2775,9 @@ describe('Query Tests', function() { }); it('Server: Priority-only updates are processed correctly by server (1).', async function() { - const refPair = getRandomNode(2), readRef = refPair[0], writeRef = refPair[1]; + const refPair = getRandomNode(2), + readRef = refPair[0], + writeRef = refPair[1]; const ea = EventAccumulatorFactory.waitsForCount(1); let readVal; @@ -2411,15 +2787,15 @@ describe('Query Tests', function() { ea.addEvent(); } }); - writeRef.set({ - a: { 
'.priority': 10, '.value': 1}, - b: { '.priority': 20, '.value': 2}, - c: { '.priority': 30, '.value': 3} + writeRef.set({ + a: { '.priority': 10, '.value': 1 }, + b: { '.priority': 20, '.value': 2 }, + c: { '.priority': 30, '.value': 3 } }); await ea.promise; expect(readVal).to.deep.equal({ b: 2, c: 3 }); - + ea.reset(); writeRef.child('a').setPriority(25); @@ -2429,7 +2805,9 @@ describe('Query Tests', function() { // Same as above but with an endAt() so we hit CompoundQueryView instead of SimpleLimitView. it('Server: Priority-only updates are processed correctly by server (2).', async function() { - const refPair = getRandomNode(2), readRef = refPair[0], writeRef = refPair[1]; + const refPair = getRandomNode(2), + readRef = refPair[0], + writeRef = refPair[1]; const ea = EventAccumulatorFactory.waitsForCount(1); let readVal; @@ -2439,11 +2817,11 @@ describe('Query Tests', function() { ea.addEvent(); } }); - - writeRef.set({ - a: { '.priority': 10, '.value': 1}, - b: { '.priority': 20, '.value': 2}, - c: { '.priority': 30, '.value': 3} + + writeRef.set({ + a: { '.priority': 10, '.value': 1 }, + b: { '.priority': 20, '.value': 2 }, + c: { '.priority': 30, '.value': 3 } }); await ea.promise; @@ -2457,19 +2835,23 @@ describe('Query Tests', function() { }); it('Latency compensation works with limit and pushed object.', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const events = []; - ref.limitToLast(3).on('child_added', function(s) { events.push(s.val()); }); + ref.limitToLast(3).on('child_added', function(s) { + events.push(s.val()); + }); // If you change this to ref.push('foo') it works. - ref.push({a: 'foo'}); + ref.push({ a: 'foo' }); // Should have synchronously gotten an event. 
expect(events.length).to.equal(1); }); it("Cache doesn't remove items that have fallen out of view.", async function() { - const refPair = getRandomNode(2), readRef = refPair[0], writeRef = refPair[1]; + const refPair = getRandomNode(2), + readRef = refPair[0], + writeRef = refPair[1]; let ea = EventAccumulatorFactory.waitsForCount(1); let readVal; @@ -2489,8 +2871,8 @@ describe('Query Tests', function() { await ea.promise; await pause(500); - expect(readVal).to.deep.equal({'k2': 2, 'k3': 3}); - + expect(readVal).to.deep.equal({ k2: 2, k3: 3 }); + ea = EventAccumulatorFactory.waitsForCount(1); writeRef.remove(); @@ -2503,12 +2885,12 @@ describe('Query Tests', function() { const reader = refs[0]; const writer = refs[1]; - await writer.set({ - a: { '.priority': 10, '.value': 1}, + await writer.set({ + a: { '.priority': 10, '.value': 1 }, b: { '.priority': 20, d: 4 }, - c: { '.priority': 30, '.value': 3} + c: { '.priority': 30, '.value': 3 } }); - + reader.child('b/d').on('value', function(snap) { expect(snap.val()).to.equal(4); }); @@ -2523,73 +2905,114 @@ describe('Query Tests', function() { }); await ea.promise; - expect(val).to.deep.equal({b: {d: 4}, c: 3}); + expect(val).to.deep.equal({ b: { d: 4 }, c: 3 }); ea.reset(); writer.child('a').setWithPriority(1, 40); await ea.promise; - expect(val).to.deep.equal({c: 3, a: 1}); + expect(val).to.deep.equal({ c: 3, a: 1 }); }); it('Integer keys behave numerically 1.', function(done) { - const ref = (getRandomNode() as Reference); - ref.set({1: true, 50: true, 550: true, 6: true, 600: true, 70: true, 8: true, 80: true }, function() { - ref.startAt(null, '80').once('value', function(s) { - expect(s.val()).to.deep.equal({80: true, 550: true, 600: true }); - done(); - }); - }); + const ref = getRandomNode() as Reference; + ref.set( + { + 1: true, + 50: true, + 550: true, + 6: true, + 600: true, + 70: true, + 8: true, + 80: true + }, + function() { + ref.startAt(null, '80').once('value', function(s) { + 
expect(s.val()).to.deep.equal({ 80: true, 550: true, 600: true }); + done(); + }); + } + ); }); it('Integer keys behave numerically 2.', function(done) { - const ref = (getRandomNode() as Reference); - ref.set({1: true, 50: true, 550: true, 6: true, 600: true, 70: true, 8: true, 80: true }, function() { - ref.endAt(null, '50').once('value', function(s) { - expect(s.val()).to.deep.equal({1: true, 6: true, 8: true, 50: true }); - done(); - }); - }); + const ref = getRandomNode() as Reference; + ref.set( + { + 1: true, + 50: true, + 550: true, + 6: true, + 600: true, + 70: true, + 8: true, + 80: true + }, + function() { + ref.endAt(null, '50').once('value', function(s) { + expect(s.val()).to.deep.equal({ + 1: true, + 6: true, + 8: true, + 50: true + }); + done(); + }); + } + ); }); it('Integer keys behave numerically 3.', function(done) { - const ref = (getRandomNode() as Reference); - ref.set({1: true, 50: true, 550: true, 6: true, 600: true, 70: true, 8: true, 80: true}, function() { - ref.startAt(null, '50').endAt(null, '80').once('value', function(s) { - expect(s.val()).to.deep.equal({50: true, 70: true, 80: true }); - done(); - }); - }); + const ref = getRandomNode() as Reference; + ref.set( + { + 1: true, + 50: true, + 550: true, + 6: true, + 600: true, + 70: true, + 8: true, + 80: true + }, + function() { + ref.startAt(null, '50').endAt(null, '80').once('value', function(s) { + expect(s.val()).to.deep.equal({ 50: true, 70: true, 80: true }); + done(); + }); + } + ); }); it('.limitToLast() on node with priority.', function(done) { - const ref = (getRandomNode() as Reference); - ref.set({'a': 'blah', '.priority': 'priority'}, function() { + const ref = getRandomNode() as Reference; + ref.set({ a: 'blah', '.priority': 'priority' }, function() { ref.limitToLast(2).once('value', function(s) { - expect(s.exportVal()).to.deep.equal({a: 'blah' }); + expect(s.exportVal()).to.deep.equal({ a: 'blah' }); done(); }); }); }); it('.equalTo works', async function() { - const 
ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const done = false; await ref.set({ a: 1, - b: {'.priority': 2, '.value': 2}, - c: {'.priority': '3', '.value': 3} + b: { '.priority': 2, '.value': 2 }, + c: { '.priority': '3', '.value': 3 } }); const snap1 = await ref.equalTo(2).once('value'); const val1 = snap1.exportVal(); - expect(val1).to.deep.equal({b: {'.priority': 2, '.value': 2}}); + expect(val1).to.deep.equal({ b: { '.priority': 2, '.value': 2 } }); const snap2 = await ref.equalTo('3', 'c').once('value'); - + const val2 = snap2.exportVal(); - expect(val2).to.deep.equal({c: {'.priority': '3', '.value': 3}}); + expect(val2).to.deep.equal({ c: { '.priority': '3', '.value': 3 } }); const snap3 = await ref.equalTo(null, 'c').once('value'); const val3 = snap3.exportVal(); @@ -2597,35 +3020,35 @@ describe('Query Tests', function() { }); it('Handles fallback for orderBy', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const children = []; - + ref.orderByChild('foo').on('child_added', function(snap) { children.push(snap.key); }); // Set initial data await ref.set({ - a: {foo: 3}, - b: {foo: 1}, - c: {foo: 2} + a: { foo: 3 }, + b: { foo: 1 }, + c: { foo: 2 } }); expect(children).to.deep.equal(['b', 'c', 'a']); }); - it("Get notified of deletes that happen while offline.", async function() { + it('Get notified of deletes that happen while offline.', async function() { const refPair = getRandomNode(2); const queryRef = refPair[0]; const writerRef = refPair[1]; let readSnapshot = null; // Write 3 children and then start our limit query. 
- await writerRef.set({a: 1, b: 2, c: 3}); + await writerRef.set({ a: 1, b: 2, c: 3 }); const ea = EventAccumulatorFactory.waitsForCount(1); - queryRef.limitToLast(3).on('value', function(s) { + queryRef.limitToLast(3).on('value', function(s) { readSnapshot = s; if (readSnapshot) { ea.addEvent(); @@ -2635,7 +3058,7 @@ describe('Query Tests', function() { // Wait for us to read the 3 children. await ea.promise; - expect(readSnapshot.val()).to.deep.equal({a: 1, b: 2, c: 3 }); + expect(readSnapshot.val()).to.deep.equal({ a: 1, b: 2, c: 3 }); queryRef.database.goOffline(); @@ -2650,20 +3073,21 @@ describe('Query Tests', function() { it('Snapshot children respect default ordering', function(done) { const refPair = getRandomNode(2); - const queryRef = refPair[0], writerRef = refPair[1]; + const queryRef = refPair[0], + writerRef = refPair[1]; const list = { - 'a': { + a: { thisvaluefirst: { '.value': true, '.priority': 1 }, name: { '.value': 'Michael', '.priority': 2 }, thisvaluelast: { '.value': true, '.priority': 3 } }, - 'b': { + b: { thisvaluefirst: { '.value': true, '.priority': null }, name: { '.value': 'Rob', '.priority': 2 }, thisvaluelast: { '.value': true, '.priority': 3 } }, - 'c': { + c: { thisvaluefirst: { '.value': true, '.priority': 1 }, name: { '.value': 'Jonny', '.priority': 2 }, thisvaluelast: { '.value': true, '.priority': 'somestring' } @@ -2675,7 +3099,6 @@ describe('Query Tests', function() { const expectedKeys = ['thisvaluefirst', 'name', 'thisvaluelast']; const expectedNames = ['Jonny', 'Michael', 'Rob']; - // Validate that snap.child() resets order to default for child snaps const orderedKeys = []; snap.child('b').forEach(function(childSnap) { @@ -2691,7 +3114,11 @@ describe('Query Tests', function() { childSnap.forEach(function(grandchildSnap) { orderedKeys.push(grandchildSnap.key); }); - expect(orderedKeys).to.deep.equal(['thisvaluefirst', 'name', 'thisvaluelast']); + expect(orderedKeys).to.deep.equal([ + 'thisvaluefirst', + 'name', + 
'thisvaluelast' + ]); }); expect(orderedNames).to.deep.equal(expectedNames); done(); @@ -2709,13 +3136,16 @@ describe('Query Tests', function() { // create a listener for the same path (asana#61028598952586). const ref = getRandomNode(1)[0]; - ref.child('child').set({name: "John"}, function() { + ref.child('child').set({ name: 'John' }, function() { ref.orderByChild('name').equalTo('John').on('value', function(snap) { ref.child('child').on('value', function(snap) { - ref.child('child').child('favoriteToy').once('value', function (snap) { - ref.child('child').child('favoriteToy').once('value', function (snap) { - done(); - }); + ref.child('child').child('favoriteToy').once('value', function(snap) { + ref + .child('child') + .child('favoriteToy') + .once('value', function(snap) { + done(); + }); }); }); }); @@ -2723,7 +3153,7 @@ describe('Query Tests', function() { }); it('Can JSON serialize refs', function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; expect(JSON.stringify(ref)).to.equal('"' + ref.toString() + '"'); }); }); diff --git a/tests/database/repoinfo.test.ts b/tests/database/repoinfo.test.ts index 9f1b81505c9..55ea13443df 100644 --- a/tests/database/repoinfo.test.ts +++ b/tests/database/repoinfo.test.ts @@ -33,9 +33,13 @@ describe('RepoInfo', function() { urlParams[LAST_SESSION_PARAM] = 'test'; const websocketUrl = repoInfo.connectionURL(WEBSOCKET, urlParams); - expect(websocketUrl).to.equal('wss://test-ns.firebaseio.com/.ws?v=5&ls=test'); + expect(websocketUrl).to.equal( + 'wss://test-ns.firebaseio.com/.ws?v=5&ls=test' + ); const longPollingUrl = repoInfo.connectionURL(LONG_POLLING, urlParams); - expect(longPollingUrl).to.equal('https://test-ns.firebaseio.com/.lp?v=5&ls=test'); + expect(longPollingUrl).to.equal( + 'https://test-ns.firebaseio.com/.lp?v=5&ls=test' + ); }); }); diff --git a/tests/database/sortedmap.test.ts b/tests/database/sortedmap.test.ts index 59f39d81c19..eb60043fef2 100644 --- 
a/tests/database/sortedmap.test.ts +++ b/tests/database/sortedmap.test.ts @@ -14,17 +14,13 @@ * limitations under the License. */ -import { expect } from "chai"; -import { - SortedMap, - LLRBNode -} from "../../src/database/core/util/SortedMap"; -import { shuffle } from "./helpers/util"; - +import { expect } from 'chai'; +import { SortedMap, LLRBNode } from '../../src/database/core/util/SortedMap'; +import { shuffle } from './helpers/util'; // Many of these were adapted from the mugs source code. // http://mads379.github.com/mugs/ -describe("SortedMap Tests", function() { +describe('SortedMap Tests', function() { const defaultCmp = function(a, b) { if (a === b) { return 0; @@ -35,46 +31,46 @@ describe("SortedMap Tests", function() { } }; - it("Create node", function() { - const map = new SortedMap(defaultCmp).insert("key", "value"); + it('Create node', function() { + const map = new SortedMap(defaultCmp).insert('key', 'value'); expect((map as any).root_.left.isEmpty()).to.equal(true); expect((map as any).root_.right.isEmpty()).to.equal(true); }); - it("You can search a map for a specific key", function() { - const map = new SortedMap(defaultCmp).insert(1,1).insert(2,2); + it('You can search a map for a specific key', function() { + const map = new SortedMap(defaultCmp).insert(1, 1).insert(2, 2); expect(map.get(1)).to.equal(1); expect(map.get(2)).to.equal(2); expect(map.get(3)).to.equal(null); }); - it("You can insert a new key/value pair into the tree", function() { - const map = new SortedMap(defaultCmp).insert(1,1).insert(2,2); + it('You can insert a new key/value pair into the tree', function() { + const map = new SortedMap(defaultCmp).insert(1, 1).insert(2, 2); expect((map as any).root_.key).to.equal(2); expect((map as any).root_.left.key).to.equal(1); }); - it("You can remove a key/value pair from the map",function() { - const map = new SortedMap(defaultCmp).insert(1,1).insert(2,2); + it('You can remove a key/value pair from the map', function() { + const map 
= new SortedMap(defaultCmp).insert(1, 1).insert(2, 2); const newMap = map.remove(1); expect(newMap.get(2)).to.equal(2); expect(newMap.get(1)).to.equal(null); }); - it("More removals",function(){ + it('More removals', function() { const map = new SortedMap(defaultCmp) - .insert(1,1) - .insert(50,50) - .insert(3,3) - .insert(4,4) - .insert(7,7) - .insert(9,9) - .insert(20,20) - .insert(18,18) - .insert(2,2) - .insert(71,71) - .insert(42,42) - .insert(88,88); + .insert(1, 1) + .insert(50, 50) + .insert(3, 3) + .insert(4, 4) + .insert(7, 7) + .insert(9, 9) + .insert(20, 20) + .insert(18, 18) + .insert(2, 2) + .insert(71, 71) + .insert(42, 42) + .insert(88, 88); const m1 = map.remove(7); const m2 = m1.remove(3); @@ -86,76 +82,95 @@ describe("SortedMap Tests", function() { expect(m3.get(20)).to.equal(20); }); - it("Removal bug", function() { + it('Removal bug', function() { const map = new SortedMap(defaultCmp) - .insert(1, 1) - .insert(2, 2) - .insert(3, 3); + .insert(1, 1) + .insert(2, 2) + .insert(3, 3); const m1 = map.remove(2); expect(m1.get(1)).to.equal(1); expect(m1.get(3)).to.equal(3); }); - it("Test increasing", function(){ + it('Test increasing', function() { const total = 100; let item; - let map = new SortedMap(defaultCmp).insert(1,1); - for (item = 2; item < total ; item++) { - map = map.insert(item,item); + let map = new SortedMap(defaultCmp).insert(1, 1); + for (item = 2; item < total; item++) { + map = map.insert(item, item); } expect((map as any).root_.checkMaxDepth_()).to.equal(true); - for (item = 2; item < total ; item++) { + for (item = 2; item < total; item++) { map = map.remove(item); } expect((map as any).root_.checkMaxDepth_()).to.equal(true); }); - it("The structure should be valid after insertion (1)",function(){ - const map = new SortedMap(defaultCmp).insert(1,1).insert(2,2).insert(3,3); + it('The structure should be valid after insertion (1)', function() { + const map = new SortedMap(defaultCmp) + .insert(1, 1) + .insert(2, 2) + .insert(3, 
3); expect((map as any).root_.key).to.equal(2); expect((map as any).root_.left.key).to.equal(1); expect((map as any).root_.right.key).to.equal(3); }); - it("The structure should be valid after insertion (2)",function(){ + it('The structure should be valid after insertion (2)', function() { const map = new SortedMap(defaultCmp) - .insert(1,1) - .insert(2,2) - .insert(3,3) - .insert(4,4) - .insert(5,5) - .insert(6,6) - .insert(7,7) - .insert(8,8) - .insert(9,9) - .insert(10,10) - .insert(11,11) - .insert(12,12); + .insert(1, 1) + .insert(2, 2) + .insert(3, 3) + .insert(4, 4) + .insert(5, 5) + .insert(6, 6) + .insert(7, 7) + .insert(8, 8) + .insert(9, 9) + .insert(10, 10) + .insert(11, 11) + .insert(12, 12); expect(map.count()).to.equal(12); expect((map as any).root_.checkMaxDepth_()).to.equal(true); }); - it("Rotate left leaves the tree in a valid state",function(){ - const node = new LLRBNode(4,4,false, - new LLRBNode(2,2,false,null, null), - new LLRBNode(7,7,true, - new LLRBNode(5,5,false,null,null), - new LLRBNode(8,8,false,null,null))); + it('Rotate left leaves the tree in a valid state', function() { + const node = new LLRBNode( + 4, + 4, + false, + new LLRBNode(2, 2, false, null, null), + new LLRBNode( + 7, + 7, + true, + new LLRBNode(5, 5, false, null, null), + new LLRBNode(8, 8, false, null, null) + ) + ); const node2 = (node as any).rotateLeft_(); expect(node2.count()).to.equal(5); expect(node2.checkMaxDepth_()).to.equal(true); }); - it("Rotate right leaves the tree in a valid state", function(){ - const node = new LLRBNode(7,7,false, - new LLRBNode(4,4,true, - new LLRBNode(2,2,false, null, null), - new LLRBNode(5,5,false, null, null)), - new LLRBNode(8,8,false, null, null)); + it('Rotate right leaves the tree in a valid state', function() { + const node = new LLRBNode( + 7, + 7, + false, + new LLRBNode( + 4, + 4, + true, + new LLRBNode(2, 2, false, null, null), + new LLRBNode(5, 5, false, null, null) + ), + new LLRBNode(8, 8, false, null, null) + ); const 
node2 = (node as any).rotateRight_(); expect(node2.count()).to.equal(5); @@ -166,62 +181,61 @@ describe("SortedMap Tests", function() { expect(node2.right.right.key).to.equal(8); }); - it("The structure should be valid after insertion (3)",function(){ + it('The structure should be valid after insertion (3)', function() { const map = new SortedMap(defaultCmp) - .insert(1,1) - .insert(50,50) - .insert(3,3) - .insert(4,4) - .insert(7,7) - .insert(9,9); + .insert(1, 1) + .insert(50, 50) + .insert(3, 3) + .insert(4, 4) + .insert(7, 7) + .insert(9, 9); expect(map.count()).to.equal(6); expect((map as any).root_.checkMaxDepth_()).to.equal(true); - const m2 = map - .insert(20,20) - .insert(18,18) - .insert(2,2); + const m2 = map.insert(20, 20).insert(18, 18).insert(2, 2); expect(m2.count()).to.equal(9); expect((m2 as any).root_.checkMaxDepth_()).to.equal(true); - const m3 = m2 - .insert(71,71) - .insert(42,42) - .insert(88,88); + const m3 = m2.insert(71, 71).insert(42, 42).insert(88, 88); expect(m3.count()).to.equal(12); expect((m3 as any).root_.checkMaxDepth_()).to.equal(true); }); - it("you can overwrite a value",function(){ - const map = new SortedMap(defaultCmp).insert(10,10).insert(10,8); + it('you can overwrite a value', function() { + const map = new SortedMap(defaultCmp).insert(10, 10).insert(10, 8); expect(map.get(10)).to.equal(8); }); - it("removing the last element returns an empty map",function() { - const map = new SortedMap(defaultCmp).insert(10,10).remove(10); + it('removing the last element returns an empty map', function() { + const map = new SortedMap(defaultCmp).insert(10, 10).remove(10); expect(map.isEmpty()).to.equal(true); }); - it("empty .get()",function() { + it('empty .get()', function() { const empty = new SortedMap(defaultCmp); - expect(empty.get("something")).to.equal(null); + expect(empty.get('something')).to.equal(null); }); - it("empty .count()",function() { + it('empty .count()', function() { const empty = new SortedMap(defaultCmp); 
expect(empty.count()).to.equal(0); }); - it("empty .remove()",function() { + it('empty .remove()', function() { const empty = new SortedMap(defaultCmp); - expect(empty.remove("something").count()).to.equal(0); + expect(empty.remove('something').count()).to.equal(0); }); - it(".reverseTraversal() works.", function() { - const map = new SortedMap(defaultCmp).insert(1, 1).insert(5, 5).insert(3, 3).insert(2, 2).insert(4, 4); + it('.reverseTraversal() works.', function() { + const map = new SortedMap(defaultCmp) + .insert(1, 1) + .insert(5, 5) + .insert(3, 3) + .insert(2, 2) + .insert(4, 4); let next = 5; map.reverseTraversal(function(key, value) { expect(key).to.equal(next); @@ -230,10 +244,11 @@ describe("SortedMap Tests", function() { expect(next).to.equal(0); }); - it("insertion and removal of 100 items in random order.", function() { + it('insertion and removal of 100 items in random order.', function() { const N = 100; - const toInsert = [], toRemove = []; - for(let i = 0; i < N; i++) { + const toInsert = [], + toRemove = []; + for (let i = 0; i < N; i++) { toInsert.push(i); toRemove.push(i); } @@ -243,7 +258,7 @@ describe("SortedMap Tests", function() { let map = new SortedMap(defaultCmp); - for (let i = 0 ; i < N ; i++ ) { + for (let i = 0; i < N; i++) { map = map.insert(toInsert[i], toInsert[i]); expect((map as any).root_.checkMaxDepth_()).to.equal(true); } @@ -258,7 +273,7 @@ describe("SortedMap Tests", function() { }); expect(next).to.equal(N); - for (let i = 0 ; i < N ; i++ ) { + for (let i = 0; i < N; i++) { expect((map as any).root_.checkMaxDepth_()).to.equal(true); map = map.remove(toRemove[i]); } @@ -266,15 +281,15 @@ describe("SortedMap Tests", function() { }); // A little perf test for convenient benchmarking. 
- xit("Perf", function() { - for(let j = 0; j < 5; j++) { + xit('Perf', function() { + for (let j = 0; j < 5; j++) { let map = new SortedMap(defaultCmp); const start = new Date().getTime(); - for(let i = 0; i < 50000; i++) { + for (let i = 0; i < 50000; i++) { map = map.insert(i, i); } - for(let i = 0; i < 50000; i++) { + for (let i = 0; i < 50000; i++) { map = map.remove(i); } const end = new Date().getTime(); @@ -282,7 +297,7 @@ describe("SortedMap Tests", function() { } }); - xit("Perf: Insertion and removal with various # of items.", function() { + xit('Perf: Insertion and removal with various # of items.', function() { const verifyTraversal = function(map, max) { let next = 0; map.inorderTraversal(function(key, value) { @@ -293,9 +308,10 @@ describe("SortedMap Tests", function() { expect(next).to.equal(max); }; - for(let N = 10; N <= 100000; N *= 10) { - const toInsert = [], toRemove = []; - for(let i = 0; i < N; i++) { + for (let N = 10; N <= 100000; N *= 10) { + const toInsert = [], + toRemove = []; + for (let i = 0; i < N; i++) { toInsert.push(i); toRemove.push(i); } @@ -306,14 +322,14 @@ describe("SortedMap Tests", function() { let map = new SortedMap(defaultCmp); const start = new Date().getTime(); - for (let i = 0 ; i < N ; i++ ) { + for (let i = 0; i < N; i++) { map = map.insert(toInsert[i], toInsert[i]); } // Ensure order is correct. 
verifyTraversal(map, N); - for (let i = 0 ; i < N ; i++ ) { + for (let i = 0; i < N; i++) { map = map.remove(toRemove[i]); } @@ -322,65 +338,66 @@ describe("SortedMap Tests", function() { } }); - xit("Perf: Comparison with {}: Insertion and removal with various # of items.", function() { - const verifyTraversal = function(tree, max) { - const keys = []; - for(const k in tree) - keys.push(k); - - keys.sort(); - expect(keys.length).to.equal(max); - for(let i = 0; i < max; i++) - expect(tree[i]).to.equal(i); - }; - - for(let N = 10; N <= 100000; N *= 10) { - const toInsert = [], toRemove = []; - for(let i = 0; i < N; i++) { - toInsert.push(i); - toRemove.push(i); - } - - shuffle(toInsert); - shuffle(toRemove); - - const tree = { }; - - const start = new Date().getTime(); - for (let i = 0 ; i < N ; i++ ) { - tree[i] = i; + xit( + 'Perf: Comparison with {}: Insertion and removal with various # of items.', + function() { + const verifyTraversal = function(tree, max) { + const keys = []; + for (const k in tree) keys.push(k); + + keys.sort(); + expect(keys.length).to.equal(max); + for (let i = 0; i < max; i++) expect(tree[i]).to.equal(i); + }; + + for (let N = 10; N <= 100000; N *= 10) { + const toInsert = [], + toRemove = []; + for (let i = 0; i < N; i++) { + toInsert.push(i); + toRemove.push(i); + } + + shuffle(toInsert); + shuffle(toRemove); + + const tree = {}; + + const start = new Date().getTime(); + for (let i = 0; i < N; i++) { + tree[i] = i; + } + + // Ensure order is correct. + //verifyTraversal(tree, N); + + for (let i = 0; i < N; i++) { + delete tree[i]; + } + + const elapsed = new Date().getTime() - start; + // console.log(N + ": " +elapsed); } - - // Ensure order is correct. 
- //verifyTraversal(tree, N); - - for (let i = 0 ; i < N ; i++ ) { - delete tree[i]; - } - - const elapsed = (new Date().getTime()) - start; - // console.log(N + ": " +elapsed); } - }); + ); - it("SortedMapIterator empty test.", function() { + it('SortedMapIterator empty test.', function() { const map = new SortedMap(defaultCmp); const iterator = map.getIterator(); expect(iterator.getNext()).to.equal(null); }); - it("SortedMapIterator test with 10 items.", function() { + it('SortedMapIterator test with 10 items.', function() { const items = []; - for(let i = 0; i < 10; i++) - items.push(i); + for (let i = 0; i < 10; i++) items.push(i); shuffle(items); let map = new SortedMap(defaultCmp); - for(let i = 0; i < 10; i++) - map = map.insert(items[i], items[i]); + for (let i = 0; i < 10; i++) map = map.insert(items[i], items[i]); const iterator = map.getIterator(); - let n, expected = 0; + let n, + expected = 0; while ((n = iterator.getNext()) !== null) { expect(n.key).to.equal(expected); expect(n.value).to.equal(expected); @@ -389,14 +406,14 @@ describe("SortedMap Tests", function() { expect(expected).to.equal(10); }); - it("SortedMap.getPredecessorKey works.", function() { + it('SortedMap.getPredecessorKey works.', function() { const map = new SortedMap(defaultCmp) - .insert(1,1) - .insert(50,50) - .insert(3,3) - .insert(4,4) - .insert(7,7) - .insert(9,9); + .insert(1, 1) + .insert(50, 50) + .insert(3, 3) + .insert(4, 4) + .insert(7, 7) + .insert(9, 9); expect(map.getPredecessorKey(1)).to.equal(null); expect(map.getPredecessorKey(3)).to.equal(1); diff --git a/tests/database/sparsesnapshottree.test.ts b/tests/database/sparsesnapshottree.test.ts index 7186e2060bc..56ead35c6b3 100644 --- a/tests/database/sparsesnapshottree.test.ts +++ b/tests/database/sparsesnapshottree.test.ts @@ -14,120 +14,116 @@ * limitations under the License. 
*/ -import { expect } from "chai"; -import { SparseSnapshotTree } from "../../src/database/core/SparseSnapshotTree"; -import { Path } from "../../src/database/core/util/Path"; -import { nodeFromJSON } from "../../src/database/core/snap/nodeFromJSON"; -import { ChildrenNode } from "../../src/database/core/snap/ChildrenNode"; - -describe("SparseSnapshotTree Tests", function () { - it("Basic remember and find.", function () { +import { expect } from 'chai'; +import { SparseSnapshotTree } from '../../src/database/core/SparseSnapshotTree'; +import { Path } from '../../src/database/core/util/Path'; +import { nodeFromJSON } from '../../src/database/core/snap/nodeFromJSON'; +import { ChildrenNode } from '../../src/database/core/snap/ChildrenNode'; + +describe('SparseSnapshotTree Tests', function() { + it('Basic remember and find.', function() { const st = new SparseSnapshotTree(); - const path = new Path("a/b"); - const node = nodeFromJSON("sdfsd"); + const path = new Path('a/b'); + const node = nodeFromJSON('sdfsd'); st.remember(path, node); - expect(st.find(new Path("a/b")).isEmpty()).to.equal(false); - expect(st.find(new Path("a"))).to.equal(null); + expect(st.find(new Path('a/b')).isEmpty()).to.equal(false); + expect(st.find(new Path('a'))).to.equal(null); }); - - it("Find inside an existing snapshot", function () { + it('Find inside an existing snapshot', function() { const st = new SparseSnapshotTree(); - const path = new Path("t/tt"); - let node = nodeFromJSON({ a: "sdfsd", x: 5, "999i": true }); - node = node.updateImmediateChild("apples", nodeFromJSON({ "goats": 88 })); + const path = new Path('t/tt'); + let node = nodeFromJSON({ a: 'sdfsd', x: 5, '999i': true }); + node = node.updateImmediateChild('apples', nodeFromJSON({ goats: 88 })); st.remember(path, node); - expect(st.find(new Path("t/tt")).isEmpty()).to.equal(false); - expect(st.find(new Path("t/tt/a")).val()).to.equal("sdfsd"); - expect(st.find(new Path("t/tt/999i")).val()).to.equal(true); - 
expect(st.find(new Path("t/tt/apples")).isEmpty()).to.equal(false); - expect(st.find(new Path("t/tt/apples/goats")).val()).to.equal(88); + expect(st.find(new Path('t/tt')).isEmpty()).to.equal(false); + expect(st.find(new Path('t/tt/a')).val()).to.equal('sdfsd'); + expect(st.find(new Path('t/tt/999i')).val()).to.equal(true); + expect(st.find(new Path('t/tt/apples')).isEmpty()).to.equal(false); + expect(st.find(new Path('t/tt/apples/goats')).val()).to.equal(88); }); - - it("Write a snapshot inside a snapshot.", function () { + it('Write a snapshot inside a snapshot.', function() { const st = new SparseSnapshotTree(); - st.remember(new Path("t"), nodeFromJSON({ a: { b: "v" } })); - st.remember(new Path("t/a/rr"), nodeFromJSON(19)); - expect(st.find(new Path("t/a/b")).val()).to.equal("v"); - expect(st.find(new Path("t/a/rr")).val()).to.equal(19); + st.remember(new Path('t'), nodeFromJSON({ a: { b: 'v' } })); + st.remember(new Path('t/a/rr'), nodeFromJSON(19)); + expect(st.find(new Path('t/a/b')).val()).to.equal('v'); + expect(st.find(new Path('t/a/rr')).val()).to.equal(19); }); - - it("Write a null value and confirm it is remembered.", function () { + it('Write a null value and confirm it is remembered.', function() { const st = new SparseSnapshotTree(); - st.remember(new Path("awq/fff"), nodeFromJSON(null)); - expect(st.find(new Path("awq/fff"))).to.equal(ChildrenNode.EMPTY_NODE); - expect(st.find(new Path("awq/sdf"))).to.equal(null); - expect(st.find(new Path("awq/fff/jjj"))).to.equal(ChildrenNode.EMPTY_NODE); - expect(st.find(new Path("awq/sdf/sdf/q"))).to.equal(null); + st.remember(new Path('awq/fff'), nodeFromJSON(null)); + expect(st.find(new Path('awq/fff'))).to.equal(ChildrenNode.EMPTY_NODE); + expect(st.find(new Path('awq/sdf'))).to.equal(null); + expect(st.find(new Path('awq/fff/jjj'))).to.equal(ChildrenNode.EMPTY_NODE); + expect(st.find(new Path('awq/sdf/sdf/q'))).to.equal(null); }); - - it("Overwrite with null and confirm it is remembered.", function () { + 
it('Overwrite with null and confirm it is remembered.', function() { const st = new SparseSnapshotTree(); - st.remember(new Path("t"), nodeFromJSON({ a: { b: "v" } })); - expect(st.find(new Path("t")).isEmpty()).to.equal(false); - st.remember(new Path("t"), ChildrenNode.EMPTY_NODE); - expect(st.find(new Path("t")).isEmpty()).to.equal(true); + st.remember(new Path('t'), nodeFromJSON({ a: { b: 'v' } })); + expect(st.find(new Path('t')).isEmpty()).to.equal(false); + st.remember(new Path('t'), ChildrenNode.EMPTY_NODE); + expect(st.find(new Path('t')).isEmpty()).to.equal(true); }); - - it("Simple remember and forget.", function () { + it('Simple remember and forget.', function() { const st = new SparseSnapshotTree(); - st.remember(new Path("t"), nodeFromJSON({ a: { b: "v" } })); - expect(st.find(new Path("t")).isEmpty()).to.equal(false); - st.forget(new Path("t")); - expect(st.find(new Path("t"))).to.equal(null); + st.remember(new Path('t'), nodeFromJSON({ a: { b: 'v' } })); + expect(st.find(new Path('t')).isEmpty()).to.equal(false); + st.forget(new Path('t')); + expect(st.find(new Path('t'))).to.equal(null); }); - - it("Forget the root.", function () { + it('Forget the root.', function() { const st = new SparseSnapshotTree(); - st.remember(new Path("t"), nodeFromJSON({ a: { b: "v" } })); - expect(st.find(new Path("t")).isEmpty()).to.equal(false); - st.forget(new Path("")); - expect(st.find(new Path("t"))).to.equal(null); + st.remember(new Path('t'), nodeFromJSON({ a: { b: 'v' } })); + expect(st.find(new Path('t')).isEmpty()).to.equal(false); + st.forget(new Path('')); + expect(st.find(new Path('t'))).to.equal(null); }); - - it("Forget snapshot inside snapshot.", function () { + it('Forget snapshot inside snapshot.', function() { const st = new SparseSnapshotTree(); - st.remember(new Path("t"), nodeFromJSON({ a: { b: "v", c: 9, art: false } })); - expect(st.find(new Path("t/a/c")).isEmpty()).to.equal(false); - expect(st.find(new Path("t")).isEmpty()).to.equal(false); - 
- st.forget(new Path("t/a/c")); - expect(st.find(new Path("t"))).to.equal(null); - expect(st.find(new Path("t/a"))).to.equal(null); - expect(st.find(new Path("t/a/b")).val()).to.equal("v"); - expect(st.find(new Path("t/a/c"))).to.equal(null); - expect(st.find(new Path("t/a/art")).val()).to.equal(false); + st.remember( + new Path('t'), + nodeFromJSON({ a: { b: 'v', c: 9, art: false } }) + ); + expect(st.find(new Path('t/a/c')).isEmpty()).to.equal(false); + expect(st.find(new Path('t')).isEmpty()).to.equal(false); + + st.forget(new Path('t/a/c')); + expect(st.find(new Path('t'))).to.equal(null); + expect(st.find(new Path('t/a'))).to.equal(null); + expect(st.find(new Path('t/a/b')).val()).to.equal('v'); + expect(st.find(new Path('t/a/c'))).to.equal(null); + expect(st.find(new Path('t/a/art')).val()).to.equal(false); }); - - it("Forget path shallower than snapshots.", function () { + it('Forget path shallower than snapshots.', function() { const st = new SparseSnapshotTree(); - st.remember(new Path("t/x1"), nodeFromJSON(false)); - st.remember(new Path("t/x2"), nodeFromJSON(true)); - st.forget(new Path("t")); - expect(st.find(new Path("t"))).to.equal(null); + st.remember(new Path('t/x1'), nodeFromJSON(false)); + st.remember(new Path('t/x2'), nodeFromJSON(true)); + st.forget(new Path('t')); + expect(st.find(new Path('t'))).to.equal(null); }); - - it("Iterate children.", function () { + it('Iterate children.', function() { const st = new SparseSnapshotTree(); - st.remember(new Path("t"), nodeFromJSON({ b: "v", c: 9, art: false })); - st.remember(new Path("q"), ChildrenNode.EMPTY_NODE); + st.remember(new Path('t'), nodeFromJSON({ b: 'v', c: 9, art: false })); + st.remember(new Path('q'), ChildrenNode.EMPTY_NODE); - let num = 0, gotT = false, gotQ = false; + let num = 0, + gotT = false, + gotQ = false; st.forEachChild(function(key, child) { num += 1; - if (key === "t") { + if (key === 't') { gotT = true; - } else if (key === "q") { + } else if (key === 'q') { gotQ = true; } 
else { expect(false).to.equal(true); @@ -139,35 +135,38 @@ describe("SparseSnapshotTree Tests", function () { expect(num).to.equal(2); }); - - it("Iterate trees.", function () { + it('Iterate trees.', function() { const st = new SparseSnapshotTree(); let count = 0; - st.forEachTree(new Path(""), function(path, tree) { + st.forEachTree(new Path(''), function(path, tree) { count += 1; }); expect(count).to.equal(0); - st.remember(new Path("t"), nodeFromJSON(1)); - st.remember(new Path("a/b"), nodeFromJSON(2)); - st.remember(new Path("a/x/g"), nodeFromJSON(3)); - st.remember(new Path("a/x/null"), nodeFromJSON(null)); - - let num = 0, got1 = false, got2 = false, got3 = false, got4 = false; - st.forEachTree(new Path("q"), function(path, node) { + st.remember(new Path('t'), nodeFromJSON(1)); + st.remember(new Path('a/b'), nodeFromJSON(2)); + st.remember(new Path('a/x/g'), nodeFromJSON(3)); + st.remember(new Path('a/x/null'), nodeFromJSON(null)); + + let num = 0, + got1 = false, + got2 = false, + got3 = false, + got4 = false; + st.forEachTree(new Path('q'), function(path, node) { num += 1; const pathString = path.toString(); - if (pathString === "/q/t") { + if (pathString === '/q/t') { got1 = true; expect(node.val()).to.equal(1); - } else if (pathString === "/q/a/b") { + } else if (pathString === '/q/a/b') { got2 = true; expect(node.val()).to.equal(2); - } else if (pathString === "/q/a/x/g") { + } else if (pathString === '/q/a/x/g') { got3 = true; expect(node.val()).to.equal(3); - } else if (pathString === "/q/a/x/null") { + } else if (pathString === '/q/a/x/null') { got4 = true; expect(node.val()).to.equal(null); } else { @@ -182,7 +181,7 @@ describe("SparseSnapshotTree Tests", function () { expect(num).to.equal(4); }); - it("Set leaf, then forget deeper path", function() { + it('Set leaf, then forget deeper path', function() { const st = new SparseSnapshotTree(); st.remember(new Path('foo'), nodeFromJSON('bar')); @@ -190,5 +189,4 @@ describe("SparseSnapshotTree Tests", 
function () { // it's not safe to remove this node expect(safeToRemove).to.equal(false); }); - }); diff --git a/tests/database/transaction.test.ts b/tests/database/transaction.test.ts index 38bc584d695..79f722d1010 100644 --- a/tests/database/transaction.test.ts +++ b/tests/database/transaction.test.ts @@ -14,23 +14,26 @@ * limitations under the License. */ -import { expect } from "chai"; -import { Reference } from "../../src/database/api/Reference"; -import { +import { expect } from 'chai'; +import { Reference } from '../../src/database/api/Reference'; +import { canCreateExtraConnections, getFreshRepoFromReference, - getRandomNode, - getVal, -} from "./helpers/util"; -import { eventTestHelper } from "./helpers/events"; -import { EventAccumulator, EventAccumulatorFactory } from "./helpers/EventAccumulator"; -import { hijackHash } from "../../src/database/api/test_access"; -import firebase from "../../src/app"; -import "../../src/database"; + getRandomNode, + getVal +} from './helpers/util'; +import { eventTestHelper } from './helpers/events'; +import { + EventAccumulator, + EventAccumulatorFactory +} from './helpers/EventAccumulator'; +import { hijackHash } from '../../src/database/api/test_access'; +import firebase from '../../src/app'; +import '../../src/database'; describe('Transaction Tests', function() { it('New value is immediately visible.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; node.child('foo').transaction(function() { return 42; }); @@ -43,11 +46,9 @@ describe('Transaction Tests', function() { }); it.skip('Event is raised for new value.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; const fooNode = node.child('foo'); - const eventHelper = eventTestHelper([ - [fooNode, ['value', '']] - ]); + const eventHelper = eventTestHelper([[fooNode, ['value', '']]]); node.child('foo').transaction(function() { return 42; @@ -57,44 +58,56 @@ 
describe('Transaction Tests', function() { }); it('Transaction result can be converted to JSON.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; - return node.transaction(() => { - return 42; - }).then(transactionResult => { - expect(transactionResult.toJSON()).to.deep.equal({ committed: true, snapshot: 42 }); - }); + return node + .transaction(() => { + return 42; + }) + .then(transactionResult => { + expect(transactionResult.toJSON()).to.deep.equal({ + committed: true, + snapshot: 42 + }); + }); }); - it('Non-aborted transaction sets committed to true in callback.', function(done) { - const node = (getRandomNode() as Reference); + it('Non-aborted transaction sets committed to true in callback.', function( + done + ) { + const node = getRandomNode() as Reference; - node.transaction(function() { - return 42; - }, - function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - expect(snapshot.val()).to.equal(42); - done(); - }); + node.transaction( + function() { + return 42; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot.val()).to.equal(42); + done(); + } + ); }); - it('Aborted transaction sets committed to false in callback.', function(done) { - const node = (getRandomNode() as Reference); + it('Aborted transaction sets committed to false in callback.', function( + done + ) { + const node = getRandomNode() as Reference; - node.transaction(function() {}, - function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(false); - expect(snapshot.val()).to.be.null; - done(); - }); + node.transaction( + function() {}, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(false); + expect(snapshot.val()).to.be.null; + done(); + } + ); }); - it('Tetris bug test - set data, reconnect, do transaction 
that aborts once data arrives, verify correct events.', - async function() { - const nodePair = (getRandomNode(2) as Reference[]); + it('Tetris bug test - set data, reconnect, do transaction that aborts once data arrives, verify correct events.', async function() { + const nodePair = getRandomNode(2) as Reference[]; let node = nodePair[0]; let eventsReceived = 0; const ea = EventAccumulatorFactory.waitsForCount(2); @@ -105,11 +118,9 @@ describe('Transaction Tests', function() { node.child('foo').on('value', function(snap) { if (eventsReceived === 0) { expect(snap.val()).to.equal('temp value'); - } - else if (eventsReceived === 1) { + } else if (eventsReceived === 1) { expect(snap.val()).to.equal(42); - } - else { + } else { // Extra event detected. expect(true).to.equal(false); } @@ -117,23 +128,25 @@ describe('Transaction Tests', function() { ea.addEvent(); }); - node.child('foo').transaction(function(value) { - if (value === null) - return 'temp value'; - else - return; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(false); - expect(snapshot.val()).to.equal(42); - }); + node.child('foo').transaction( + function(value) { + if (value === null) return 'temp value'; + else return; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(false); + expect(snapshot.val()).to.equal(42); + } + ); return ea.promise; }); it('Use transaction to create a node, make sure exactly one event is received.', function() { - const node = (getRandomNode() as Reference); - let events = 0, done = false; + const node = getRandomNode() as Reference; + let events = 0, + done = false; const ea = new EventAccumulator(() => done && events === 1); @@ -143,54 +156,55 @@ describe('Transaction Tests', function() { if (events > 1) throw 'Expected 1 event on a, but got two.'; }); - node.child('a').transaction(function() { - return 42; - }, function() { - done = true; - ea.addEvent(); - }); + 
node.child('a').transaction( + function() { + return 42; + }, + function() { + done = true; + ea.addEvent(); + } + ); return ea.promise; }); - it('Use transaction to update one of two existing child nodes. ' + - 'Make sure events are only raised for the changed node.', async function() { - const nodePair = (getRandomNode(2) as Reference[]); - let node = nodePair[0].child('foo'); + it( + 'Use transaction to update one of two existing child nodes. ' + + 'Make sure events are only raised for the changed node.', + async function() { + const nodePair = getRandomNode(2) as Reference[]; + let node = nodePair[0].child('foo'); - await Promise.all([ - node.child('a').set(42), - node.child('b').set(42) - ]); + await Promise.all([node.child('a').set(42), node.child('b').set(42)]); - node = nodePair[1].child('foo'); - const eventHelper = eventTestHelper([ - [node.child('a'), ['value', '']], - [node.child('b'), ['value', '']] - ]); + node = nodePair[1].child('foo'); + const eventHelper = eventTestHelper([ + [node.child('a'), ['value', '']], + [node.child('b'), ['value', '']] + ]); - await eventHelper.promise; + await eventHelper.promise; - eventHelper.addExpectedEvents([ - [node.child('b'), ['value', '']] - ]); - - const transaction = node.transaction(function() { - return {a: 42, b: 87}; - }, function(error, committed, snapshot) { - expect(error).to.be.null; - expect(committed).to.equal(true); - expect(snapshot.val()).to.deep.equal({a: 42, b: 87}); - }); + eventHelper.addExpectedEvents([[node.child('b'), ['value', '']]]); - return Promise.all([ - eventHelper.promise, - transaction - ]); - }); + const transaction = node.transaction( + function() { + return { a: 42, b: 87 }; + }, + function(error, committed, snapshot) { + expect(error).to.be.null; + expect(committed).to.equal(true); + expect(snapshot.val()).to.deep.equal({ a: 42, b: 87 }); + } + ); + + return Promise.all([eventHelper.promise, transaction]); + } + ); it('Transaction is only called once when initializing an empty 
node.', function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let updateCalled = 0; const ea = EventAccumulatorFactory.waitsForCount(1); @@ -198,8 +212,7 @@ describe('Transaction Tests', function() { expect(value).to.equal(null); updateCalled++; ea.addEvent(); - if (updateCalled > 1) - throw 'Transaction called too many times.'; + if (updateCalled > 1) throw 'Transaction called too many times.'; if (value === null) { return { a: 5, b: 3 }; @@ -209,9 +222,14 @@ describe('Transaction Tests', function() { return ea.promise; }); - it('Second transaction gets run immediately on previous output and only runs once.', function(done) { - const nodePair = (getRandomNode(2) as Reference[]); - let firstRun = false, firstDone = false, secondRun = false, secondDone = false; + it('Second transaction gets run immediately on previous output and only runs once.', function( + done + ) { + const nodePair = getRandomNode(2) as Reference[]; + let firstRun = false, + firstDone = false, + secondRun = false, + secondDone = false; function onComplete() { if (firstDone && secondDone) { @@ -222,31 +240,37 @@ describe('Transaction Tests', function() { } } - nodePair[0].transaction(function() { - expect(firstRun).to.equal(false); - firstRun = true; - return 42; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - firstDone = true; - onComplete(); - }); + nodePair[0].transaction( + function() { + expect(firstRun).to.equal(false); + firstRun = true; + return 42; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + firstDone = true; + onComplete(); + } + ); expect(firstRun).to.equal(true); - nodePair[0].transaction(function(value) { - expect(secondRun).to.equal(false); - secondRun = true; - expect(value).to.equal(42); - return 84; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - 
expect(committed).to.equal(true); - secondDone = true; - onComplete(); - }); + nodePair[0].transaction( + function(value) { + expect(secondRun).to.equal(false); + secondRun = true; + expect(value).to.equal(42); + return 84; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + secondDone = true; + onComplete(); + } + ); expect(secondRun).to.equal(true); - + expect(getVal(nodePair[0])).to.equal(84); }); @@ -257,8 +281,10 @@ describe('Transaction Tests', function() { // Transaction #1 should complete as planned (since it was already sent). // Transaction #2 should be aborted by the set. // Transaction #3 should be re-run after #2 is reverted, and then be sent to the server and succeed. - let firstDone = false, secondDone = false, thirdDone = false; - const node = (getRandomNode() as Reference); + let firstDone = false, + secondDone = false, + thirdDone = false; + const node = getRandomNode() as Reference; let nodeSnap = null; let nodeFooSnap = null; @@ -271,8 +297,9 @@ describe('Transaction Tests', function() { nodeFooSnap = s; }); - - let firstRun = false, secondRun = false, thirdRunCount = 0; + let firstRun = false, + secondRun = false, + thirdRunCount = 0; const ea = new EventAccumulator(() => firstDone && thirdDone); node.child('foo').transaction( function() { @@ -286,14 +313,15 @@ describe('Transaction Tests', function() { expect(snapshot.val()).to.equal(42); firstDone = true; ea.addEvent(); - }); + } + ); expect(nodeFooSnap.val()).to.deep.equal(42); node.transaction( function() { expect(secondRun).to.equal(false); secondRun = true; - return { 'foo' : 84, 'bar' : 1}; + return { foo: 84, bar: 1 }; }, function(error, committed, snapshot) { expect(committed).to.equal(false); @@ -302,28 +330,31 @@ describe('Transaction Tests', function() { } ); expect(secondRun).to.equal(true); - expect(nodeSnap.val()).to.deep.equal({'foo': 84, 'bar': 1}); - - node.child('bar').transaction(function(val) { - 
thirdRunCount++; - if (thirdRunCount === 1) { - expect(val).to.equal(1); - return 'first'; - } else if (thirdRunCount === 2) { - expect(val).to.equal(null); - return 'second'; - } else { - throw new Error('Called too many times!'); + expect(nodeSnap.val()).to.deep.equal({ foo: 84, bar: 1 }); + + node.child('bar').transaction( + function(val) { + thirdRunCount++; + if (thirdRunCount === 1) { + expect(val).to.equal(1); + return 'first'; + } else if (thirdRunCount === 2) { + expect(val).to.equal(null); + return 'second'; + } else { + throw new Error('Called too many times!'); + } + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot.val()).to.equal('second'); + thirdDone = true; + ea.addEvent(); } - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - expect(snapshot.val()).to.equal('second'); - thirdDone = true; - ea.addEvent(); - }); + ); expect(thirdRunCount).to.equal(1); - expect(nodeSnap.val()).to.deep.equal({'foo' : 84, 'bar': 'first'}); + expect(nodeSnap.val()).to.deep.equal({ foo: 84, bar: 'first' }); // This rolls back the second transaction, and triggers a re-run of the third. 
// However, a new value event won't be triggered until the listener is complete, @@ -338,35 +369,44 @@ describe('Transaction Tests', function() { await ea.promise; - expect(nodeSnap.val()).to.deep.equal({'foo' : 0, 'bar': 'second'}); + expect(nodeSnap.val()).to.deep.equal({ foo: 0, bar: 'second' }); }); it('transaction(), set(), set() should work.', function(done) { - const ref = (getRandomNode() as Reference); - ref.transaction(function(curr) { - expect(curr).to.equal(null); - return 'hi!'; - }, function(error, committed) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - done(); - }); + const ref = getRandomNode() as Reference; + ref.transaction( + function(curr) { + expect(curr).to.equal(null); + return 'hi!'; + }, + function(error, committed) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + done(); + } + ); ref.set('foo'); ref.set('bar'); }); it('Priority is preserved when setting data.', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let complete = false; let snap; - node.on('value', function(s) { snap = s; }); + node.on('value', function(s) { + snap = s; + }); node.setWithPriority('test', 5); expect(snap.getPriority()).to.equal(5); let promise = node.transaction( - function() { return 'new value'}, - function() { complete = true; } + function() { + return 'new value'; + }, + function() { + complete = true; + } ); expect(snap.val()).to.equal('new value'); @@ -377,52 +417,61 @@ describe('Transaction Tests', function() { }); it('Tetris bug test - Can do transactions from transaction callback.', async function() { - const nodePair = (getRandomNode(2) as Reference[]), writeDone = false; + const nodePair = getRandomNode(2) as Reference[], + writeDone = false; await nodePair[0].child('foo').set(42); const node = nodePair[1]; return new Promise(resolve => { - node.child('foo').transaction(function(val) { - if (val === null) - return 84; - }, function() { - 
node.child('bar').transaction(function(val) { - resolve(); - return 168; - }); - }); - }) + node.child('foo').transaction( + function(val) { + if (val === null) return 84; + }, + function() { + node.child('bar').transaction(function(val) { + resolve(); + return 168; + }); + } + ); + }); }); it('Resulting snapshot is passed to onComplete callback.', async function() { - const nodePair = (getRandomNode(2) as Reference[]); - await nodePair[0].transaction(function(v) { - if (v === null) - return 'hello!'; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - expect(snapshot.val()).to.equal('hello!'); - }); + const nodePair = getRandomNode(2) as Reference[]; + await nodePair[0].transaction( + function(v) { + if (v === null) return 'hello!'; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot.val()).to.equal('hello!'); + } + ); // Do it again for the aborted case. - await nodePair[0].transaction(function(v) { - if (v === null) - return 'hello!'; - }, function(error, committed, snapshot) { - expect(committed).to.equal(false); - expect(snapshot.val()).to.equal('hello!'); - }); + await nodePair[0].transaction( + function(v) { + if (v === null) return 'hello!'; + }, + function(error, committed, snapshot) { + expect(committed).to.equal(false); + expect(snapshot.val()).to.equal('hello!'); + } + ); // Do it again on a fresh connection, for the aborted case. 
- await nodePair[1].transaction(function(v) { - if (v === null) - return 'hello!'; - }, function(error, committed, snapshot) { - expect(committed).to.equal(false); - expect(snapshot.val()).to.equal('hello!'); - }); + await nodePair[1].transaction( + function(v) { + if (v === null) return 'hello!'; + }, + function(error, committed, snapshot) { + expect(committed).to.equal(false); + expect(snapshot.val()).to.equal('hello!'); + } + ); }); it('Transaction aborts after 25 retries.', function(done) { @@ -430,35 +479,43 @@ describe('Transaction Tests', function() { return 'duck, duck, goose.'; }); - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let tries = 0; - node.transaction(function(curr) { - expect(tries).to.be.lessThan(25); - tries++; - return 'hello!'; - }, function(error, committed, snapshot) { - expect(error.message).to.equal('maxretry'); - expect(committed).to.equal(false); - expect(tries).to.equal(25); - restoreHash(); - done(); - }); + node.transaction( + function(curr) { + expect(tries).to.be.lessThan(25); + tries++; + return 'hello!'; + }, + function(error, committed, snapshot) { + expect(error.message).to.equal('maxretry'); + expect(committed).to.equal(false); + expect(tries).to.equal(25); + restoreHash(); + done(); + } + ); }); - it('Set should cancel already sent transactions that come back as datastale.', function(done) { - const nodePair = (getRandomNode(2) as Reference[]); + it('Set should cancel already sent transactions that come back as datastale.', function( + done + ) { + const nodePair = getRandomNode(2) as Reference[]; let transactionCalls = 0; nodePair[0].set(5, function() { - nodePair[1].transaction(function(old) { - expect(transactionCalls).to.equal(0); - expect(old).to.equal(null); - transactionCalls++; - return 72; - }, function(error, committed, snapshot) { - expect(error.message).to.equal('set'); - expect(committed).to.equal(false); - done(); - }); + nodePair[1].transaction( + function(old) { + 
expect(transactionCalls).to.equal(0); + expect(old).to.equal(null); + transactionCalls++; + return 72; + }, + function(error, committed, snapshot) { + expect(error.message).to.equal('set'); + expect(committed).to.equal(false); + done(); + } + ); // Transaction should get sent but fail due to stale data, and then aborted because of the below set(). nodePair[1].set(32); @@ -466,7 +523,7 @@ describe('Transaction Tests', function() { }); it('Update should not cancel unrelated transactions', async function() { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; let fooTransactionDone = false; let barTransactionDone = false; const restoreHash = hijackHash(function() { @@ -474,31 +531,37 @@ describe('Transaction Tests', function() { }); await node.child('foo').set(5); - + // 'foo' gets overwritten in the update so the transaction gets cancelled. - node.child('foo').transaction(function(old) { - return 72; - }, function(error, committed, snapshot) { - expect(error.message).to.equal('set'); - expect(committed).to.equal(false); - fooTransactionDone = true; - }); + node.child('foo').transaction( + function(old) { + return 72; + }, + function(error, committed, snapshot) { + expect(error.message).to.equal('set'); + expect(committed).to.equal(false); + fooTransactionDone = true; + } + ); // 'bar' does not get touched during the update and the transaction succeeds. 
- node.child('bar').transaction(function(old) { - return 72; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - barTransactionDone = true; - }); + node.child('bar').transaction( + function(old) { + return 72; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + barTransactionDone = true; + } + ); await node.update({ - 'foo': 'newValue', - 'boo': 'newValue', - 'loo' : { - 'doo' : { - 'boo': 'newValue' + foo: 'newValue', + boo: 'newValue', + loo: { + doo: { + boo: 'newValue' } } }); @@ -509,143 +572,173 @@ describe('Transaction Tests', function() { }); it('Test transaction on wacky unicode data.', function(done) { - const nodePair = (getRandomNode(2) as Reference[]); + const nodePair = getRandomNode(2) as Reference[]; nodePair[0].set('♜♞♝♛♚♝♞♜', function() { - nodePair[1].transaction(function(current) { - if (current !== null) - expect(current).to.equal('♜♞♝♛♚♝♞♜'); - return '♖♘♗♕♔♗♘♖'; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - done(); - }); + nodePair[1].transaction( + function(current) { + if (current !== null) expect(current).to.equal('♜♞♝♛♚♝♞♜'); + return '♖♘♗♕♔♗♘♖'; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + done(); + } + ); }); }); it('Test immediately aborted transaction.', function(done) { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; // without callback. node.transaction(function(curr) { return; }); // with callback. 
- node.transaction(function(curr) { - return; - }, function(error, committed, snapshot) { - expect(committed).to.equal(false); - done(); - }); + node.transaction( + function(curr) { + return; + }, + function(error, committed, snapshot) { + expect(committed).to.equal(false); + done(); + } + ); }); it('Test adding to an array with a transaction.', function(done) { - const node = (getRandomNode() as Reference); + const node = getRandomNode() as Reference; node.set(['cat', 'horse'], function() { - node.transaction(function(current) { - if (current) { - current.push('dog'); - } else { - current = ['dog']; + node.transaction( + function(current) { + if (current) { + current.push('dog'); + } else { + current = ['dog']; + } + return current; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot.val()).to.deep.equal(['cat', 'horse', 'dog']); + done(); } - return current; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - expect(snapshot.val()).to.deep.equal(['cat', 'horse', 'dog']); - done(); - }); + ); }); }); it('Merged transactions have correct snapshot in onComplete.', async function() { - const nodePair = (getRandomNode(2) as Reference[]), node1 = nodePair[0], node2 = nodePair[1]; + const nodePair = getRandomNode(2) as Reference[], + node1 = nodePair[0], + node2 = nodePair[1]; let transaction1Done, transaction2Done; - await node1.set({a: 0}); - - const tx1 = node2.transaction(function(val) { - if (val !== null) { - expect(val).to.deep.equal({a: 0}); + await node1.set({ a: 0 }); + + const tx1 = node2.transaction( + function(val) { + if (val !== null) { + expect(val).to.deep.equal({ a: 0 }); + } + return { a: 1 }; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot.key).to.equal(node2.key); + // Per new behavior, will include the accepted value of the 
transaction, if it was successful. + expect(snapshot.val()).to.deep.equal({ a: 1 }); + transaction1Done = true; } - return {a: 1}; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - expect(snapshot.key).to.equal(node2.key); - // Per new behavior, will include the accepted value of the transaction, if it was successful. - expect(snapshot.val()).to.deep.equal({a: 1}); - transaction1Done = true; - }); + ); - const tx2 = node2.child('a').transaction(function(val) { - if (val !== null) { - expect(val).to.equal(1); // should run after the first transaction. + const tx2 = node2.child('a').transaction( + function(val) { + if (val !== null) { + expect(val).to.equal(1); // should run after the first transaction. + } + return 2; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot.key).to.equal('a'); + expect(snapshot.val()).to.deep.equal(2); + transaction2Done = true; } - return 2; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - expect(snapshot.key).to.equal('a'); - expect(snapshot.val()).to.deep.equal(2); - transaction2Done = true; - }); + ); - return Promise.all([ tx1, tx2 ]) + return Promise.all([tx1, tx2]); }); - it('Doing set() in successful transaction callback works. Case 870.', function(done) { - const node = (getRandomNode() as Reference); + it('Doing set() in successful transaction callback works. Case 870.', function( + done + ) { + const node = getRandomNode() as Reference; let transactionCalled = false; let callbackCalled = false; - node.transaction(function(val) { - expect(transactionCalled).to.not.be.ok; - transactionCalled = true; - return 'hi'; - }, function() { - expect(callbackCalled).to.not.be.ok; - callbackCalled = true; - node.set('transaction done', function() { - done(); - }); - }); - }); - - it('Doing set() in aborted transaction callback works. 
Case 870.', function(done) { - const nodePair = (getRandomNode(2) as Reference[]), node1 = nodePair[0], node2 = nodePair[1]; - - node1.set('initial', function() { - let transactionCalled = false; - let callbackCalled = false; - node2.transaction(function(val) { - // Return dummy value until we're called with the actual current value. - if (val === null) - return 'hi'; - + node.transaction( + function(val) { expect(transactionCalled).to.not.be.ok; transactionCalled = true; - return; - }, function(error, committed, snapshot) { + return 'hi'; + }, + function() { expect(callbackCalled).to.not.be.ok; callbackCalled = true; - node2.set('transaction done', function() { + node.set('transaction done', function() { done(); }); - }); + } + ); + }); + + it('Doing set() in aborted transaction callback works. Case 870.', function( + done + ) { + const nodePair = getRandomNode(2) as Reference[], + node1 = nodePair[0], + node2 = nodePair[1]; + + node1.set('initial', function() { + let transactionCalled = false; + let callbackCalled = false; + node2.transaction( + function(val) { + // Return dummy value until we're called with the actual current value. + if (val === null) return 'hi'; + + expect(transactionCalled).to.not.be.ok; + transactionCalled = true; + return; + }, + function(error, committed, snapshot) { + expect(callbackCalled).to.not.be.ok; + callbackCalled = true; + node2.set('transaction done', function() { + done(); + }); + } + ); }); }); it('Pending transactions are canceled on disconnect.', function(done) { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; // wait to be connected and some data set. 
ref.set('initial', function() { - ref.transaction(function(current) { - return 'new'; - }, function(error, committed, snapshot) { - expect(committed).to.equal(false); - expect(error.message).to.equal('disconnect'); - done(); - }); + ref.transaction( + function(current) { + return 'new'; + }, + function(error, committed, snapshot) { + expect(committed).to.equal(false); + expect(error.message).to.equal('disconnect'); + done(); + } + ); // Kill the connection, which should cancel the outstanding transaction, since we don't know if it was // committed on the server or not. @@ -655,7 +748,8 @@ describe('Transaction Tests', function() { }); it('Transaction without local events (1)', async function() { - const ref = (getRandomNode() as Reference), actions = []; + const ref = getRandomNode() as Reference, + actions = []; let ea = EventAccumulatorFactory.waitsForCount(1); ref.on('value', function(s) { @@ -666,65 +760,82 @@ describe('Transaction Tests', function() { await ea.promise; ea = new EventAccumulator(() => actions.length >= 4); - - ref.transaction(function() { - return 'hello!'; - }, function(error, committed, snapshot) { - expect(error).to.be.null; - expect(committed).to.equal(true); - expect(snapshot.val()).to.equal('hello!'); - - actions.push('txn completed'); - ea.addEvent(); - }, /*applyLocally=*/false); + + ref.transaction( + function() { + return 'hello!'; + }, + function(error, committed, snapshot) { + expect(error).to.be.null; + expect(committed).to.equal(true); + expect(snapshot.val()).to.equal('hello!'); + + actions.push('txn completed'); + ea.addEvent(); + }, + /*applyLocally=*/ false + ); // Shouldn't have gotten any events yet. 
expect(actions).to.deep.equal(['value null']); actions.push('txn run'); - ea.addEvent(); + ea.addEvent(); await ea.promise; - expect(actions).to.deep.equal(['value null', 'txn run', 'value hello!', 'txn completed']); + expect(actions).to.deep.equal([ + 'value null', + 'txn run', + 'value hello!', + 'txn completed' + ]); }); // This test is meant to ensure that with applyLocally=false, while the transaction is outstanding, we continue // to get events from other clients. it('Transaction without local events (2)', function(done) { - const refPair = (getRandomNode(2) as Reference[]), ref1 = refPair[0], ref2 = refPair[1]; - const restoreHash = hijackHash(function() { return 'badhash'; }); + const refPair = getRandomNode(2) as Reference[], + ref1 = refPair[0], + ref2 = refPair[1]; + const restoreHash = hijackHash(function() { + return 'badhash'; + }); const SETS = 4; - let events = [], retries = 0, setsDone = 0; + let events = [], + retries = 0, + setsDone = 0; function txn1(next) { // Do a transaction on the first connection which will keep retrying (cause we hijacked the hash). // Make sure we're getting events for the sets happening on the second connection. - ref1.transaction(function(current) { - retries++; - // We should be getting server events while the transaction is outstanding. - for (let i = 0; i < (current || 0); i++) { - expect(events[i]).to.equal(i); - } - - if (current === SETS - 1) { - restoreHash(); - } - return 'txn result'; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); + ref1.transaction( + function(current) { + retries++; + // We should be getting server events while the transaction is outstanding. 
+ for (let i = 0; i < (current || 0); i++) { + expect(events[i]).to.equal(i); + } - expect(snapshot && snapshot.val()).to.equal('txn result'); - next() - }, /*applyLocally=*/false); + if (current === SETS - 1) { + restoreHash(); + } + return 'txn result'; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot && snapshot.val()).to.equal('txn result'); + next(); + }, + /*applyLocally=*/ false + ); // Meanwhile, do sets from the second connection. const doSet = function() { ref2.set(setsDone, function() { setsDone++; - if (setsDone < SETS) - doSet(); + if (setsDone < SETS) doSet(); }); }; doSet(); @@ -737,8 +848,7 @@ describe('Transaction Tests', function() { txn1(function() { // Sanity check stuff. expect(setsDone).to.equal(SETS); - if (retries === 0) - throw 'Transaction should have had to retry!'; + if (retries === 0) throw 'Transaction should have had to retry!'; // Validate we got the correct events. for (let i = 0; i < SETS; i++) { @@ -755,49 +865,55 @@ describe('Transaction Tests', function() { }); it('Transaction from value callback.', function(done) { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const COUNT = 1; ref.on('value', function(snap) { let shouldCommit = true; - ref.transaction(function(current) { - if (current == null) { - return 0; - } else if (current < COUNT) { - return current + 1; - } else { - shouldCommit = false; - } + ref.transaction( + function(current) { + if (current == null) { + return 0; + } else if (current < COUNT) { + return current + 1; + } else { + shouldCommit = false; + } - if (snap.val() === COUNT) { - done(); + if (snap.val() === COUNT) { + done(); + } + }, + function(error, committed, snap) { + expect(committed).to.equal(shouldCommit); } - }, function(error, committed, snap) { - expect(committed).to.equal(shouldCommit); - }); + ); }); }); it('Transaction runs on null only once after reconnect (Case 
1981).', async function() { if (!canCreateExtraConnections()) return; - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; await ref.set(42); const newRef = getFreshRepoFromReference(ref); let run = 0; - return newRef.transaction(function(curr) { - run++; - if (run === 1) { - expect(curr).to.equal(null); - } else if (run === 2) { - expect(curr).to.equal(42); + return newRef.transaction( + function(curr) { + run++; + if (run === 1) { + expect(curr).to.equal(null); + } else if (run === 2) { + expect(curr).to.equal(42); + } + return 3.14; + }, + function(error, committed, resultSnapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(run).to.equal(2); + expect(resultSnapshot.val()).to.equal(3.14); } - return 3.14; - }, function(error, committed, resultSnapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - expect(run).to.equal(2); - expect(resultSnapshot.val()).to.equal(3.14); - }); + ); }); // Provided by bk@thinkloop.com, this was failing when we sent puts before listens, but passes now. 
@@ -819,29 +935,34 @@ describe('Transaction Tests', function() { } function makeFriend(accountID, friendAccountID, firebase) { - firebase.child(accountID).child(friendAccountID).transaction(function(r) { - if (r == null) { - r = { accountID: accountID, friendAccountID: friendAccountID, percentCommon: 0 }; - } + firebase.child(accountID).child(friendAccountID).transaction( + function(r) { + if (r == null) { + r = { + accountID: accountID, + friendAccountID: friendAccountID, + percentCommon: 0 + }; + } - return r; - }, - function(error, committed, snapshot) { - if (error) { - throw error; - } - else if (!committed) { - throw 'All should be committed!'; - } - else { - count++; - ea.addEvent(); - snapshot.ref.setPriority(snapshot.val().percentCommon); - } - }, false); + return r; + }, + function(error, committed, snapshot) { + if (error) { + throw error; + } else if (!committed) { + throw 'All should be committed!'; + } else { + count++; + ea.addEvent(); + snapshot.ref.setPriority(snapshot.val().percentCommon); + } + }, + false + ); } - const firebase = (getRandomNode() as Reference); + const firebase = getRandomNode() as Reference; firebase.database.goOffline(); firebase.database.goOnline(); let count = 0; @@ -852,29 +973,41 @@ describe('Transaction Tests', function() { }); it('transaction() respects .priority.', function(done) { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const values = []; - ref.on('value', function(s) { values.push(s.exportVal()); }); - - ref.transaction(function(curr) { - expect(curr).to.equal(null); - return {'.value': 5, '.priority': 5}; - }, function() { - ref.transaction(function(curr) { - expect(curr).to.equal(5); - return {'.value': 10, '.priority': 10 }; - }, function() { - expect(values).to.deep.equal([ - {'.value': 5, '.priority': 5}, - {'.value': 10, '.priority': 10} - ]); - done(); - }); + ref.on('value', function(s) { + values.push(s.exportVal()); }); + + ref.transaction( + function(curr) { 
+ expect(curr).to.equal(null); + return { '.value': 5, '.priority': 5 }; + }, + function() { + ref.transaction( + function(curr) { + expect(curr).to.equal(5); + return { '.value': 10, '.priority': 10 }; + }, + function() { + expect(values).to.deep.equal([ + { '.value': 5, '.priority': 5 }, + { '.value': 10, '.priority': 10 } + ]); + done(); + } + ); + } + ); }); - it('Transaction properly reverts data when you add a deeper listen.', function(done) { - const refPair = (getRandomNode(2) as Reference[]), ref1 = refPair[0], ref2 = refPair[1]; + it('Transaction properly reverts data when you add a deeper listen.', function( + done + ) { + const refPair = getRandomNode(2) as Reference[], + ref1 = refPair[0], + ref2 = refPair[1]; ref1.child('y').set('test', function() { ref2.transaction(function(curr) { if (curr === null) { @@ -891,32 +1024,38 @@ describe('Transaction Tests', function() { }); it('Transaction with integer keys', function(done) { - const ref = (getRandomNode() as Reference); - ref.set({1: 1, 5: 5, 10: 10, 20: 20}, function() { - ref.transaction(function(current) { - return 42; - }, function(error, committed) { - expect(error).to.be.null; - expect(committed).to.equal(true); - done(); - }); + const ref = getRandomNode() as Reference; + ref.set({ 1: 1, 5: 5, 10: 10, 20: 20 }, function() { + ref.transaction( + function(current) { + return 42; + }, + function(error, committed) { + expect(error).to.be.null; + expect(committed).to.equal(true); + done(); + } + ); }); }); it('Return null from first run of transaction.', function(done) { - const ref = (getRandomNode() as Reference); - ref.transaction(function(c) { - return null; - }, function(error, committed) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - done(); - }); + const ref = getRandomNode() as Reference; + ref.transaction( + function(c) { + return null; + }, + function(error, committed) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + done(); + } + ); }); // 
https://app.asana.com/0/5673976843758/9259161251948 it('Bubble-app transaction bug.', function(done) { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; ref.child('a').transaction(function() { return 1; }); @@ -926,34 +1065,37 @@ describe('Transaction Tests', function() { ref.child('b').transaction(function() { return 7; }); - ref.transaction(function(current) { - if (current && current.a && current.b) { - return current.a + current.b; - } else { - return 'dummy'; + ref.transaction( + function(current) { + if (current && current.a && current.b) { + return current.a + current.b; + } else { + return 'dummy'; + } + }, + function(error, committed, snap) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(snap.val()).to.deep.equal(50); + done(); } - }, function(error, committed, snap) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - expect(snap.val()).to.deep.equal(50); - done(); - }); + ); }); it('Transaction and priority: Can set priority in transaction on empty node', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; await ref.transaction(function(current) { return { '.value': 42, '.priority': 7 }; }); return ref.once('value', function(s) { - expect(s.exportVal()).to.deep.equal({ '.value': 42, '.priority': 7}); + expect(s.exportVal()).to.deep.equal({ '.value': 42, '.priority': 7 }); }); }); it("Transaction and priority: Transaction doesn't change priority.", async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; await ref.set({ '.value': 42, '.priority': 7 }); @@ -963,11 +1105,11 @@ describe('Transaction Tests', function() { const snap = await ref.once('value'); - expect(snap.exportVal()).to.deep.equal({ '.value': 12, '.priority': 7}); + expect(snap.exportVal()).to.deep.equal({ '.value': 12, '.priority': 7 }); }); it('Transaction and priority: Transaction can change 
priority on non-empty node.', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; await ref.set({ '.value': 42, '.priority': 7 }); @@ -976,16 +1118,16 @@ describe('Transaction Tests', function() { }); return ref.once('value', function(s) { - expect(s.exportVal()).to.deep.equal({ '.value': 43, '.priority': 8}); + expect(s.exportVal()).to.deep.equal({ '.value': 43, '.priority': 8 }); }); }); it('Transaction and priority: Changing priority on siblings.', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; - await ref.set({ - a: { '.value': 'a', '.priority': 'a' }, - b: { '.value': 'b', '.priority': 'b' } + await ref.set({ + a: { '.value': 'a', '.priority': 'a' }, + b: { '.value': 'b', '.priority': 'b' } }); const tx1 = ref.child('a').transaction(function(current) { @@ -999,14 +1141,20 @@ describe('Transaction Tests', function() { await Promise.all([tx1, tx2]); return ref.once('value', function(s) { - expect(s.exportVal()).to.deep.equal({ a: { '.value': 'a2', '.priority': 'a2' }, b: { '.value': 'b2', '.priority': 'b2' } }); + expect(s.exportVal()).to.deep.equal({ + a: { '.value': 'a2', '.priority': 'a2' }, + b: { '.value': 'b2', '.priority': 'b2' } + }); }); }); it('Transaction and priority: Leaving priority on siblings.', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; - await ref.set({a: {'.value': 'a', '.priority': 'a'}, b: {'.value': 'b', '.priority': 'b'}}); + await ref.set({ + a: { '.value': 'a', '.priority': 'a' }, + b: { '.value': 'b', '.priority': 'b' } + }); const tx1 = ref.child('a').transaction(function(current) { return 'a2'; @@ -1019,75 +1167,108 @@ describe('Transaction Tests', function() { await Promise.all([tx1, tx2]); return ref.once('value', function(s) { - expect(s.exportVal()).to.deep.equal({ a: { '.value': 'a2', '.priority': 'a' }, b: { '.value': 'b2', '.priority': 'b' } }); 
+ expect(s.exportVal()).to.deep.equal({ + a: { '.value': 'a2', '.priority': 'a' }, + b: { '.value': 'b2', '.priority': 'b' } + }); }); }); - it('transaction() doesn\'t pick up cached data from previous once().', function(done) { - const refPair = (getRandomNode(2) as Reference[]); - const me = refPair[0], other = refPair[1]; + it("transaction() doesn't pick up cached data from previous once().", function( + done + ) { + const refPair = getRandomNode(2) as Reference[]; + const me = refPair[0], + other = refPair[1]; me.set('not null', function() { me.once('value', function(snapshot) { other.set(null, function() { - me.transaction(function(snapshot) { - if (snapshot === null) { - return 'it was null!'; - } else { - return 'it was not null!'; + me.transaction( + function(snapshot) { + if (snapshot === null) { + return 'it was null!'; + } else { + return 'it was not null!'; + } + }, + function(err, committed, snapshot) { + expect(err).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot.val()).to.deep.equal('it was null!'); + done(); } - }, function(err, committed, snapshot) { - expect(err).to.equal(null); - expect(committed).to.equal(true); - expect(snapshot.val()).to.deep.equal('it was null!'); - done(); - }); + ); }); }); }); }); - it('transaction() doesn\'t pick up cached data from previous transaction.', function(done) { - const refPair = (getRandomNode(2) as Reference[]); - const me = refPair[0], other = refPair[1]; - me.transaction(function() { - return 'not null'; - }, function(err, committed) { - expect(err).to.equal(null); - expect(committed).to.equal(true); - other.set(null, function() { - me.transaction(function(snapshot) { - if (snapshot === null) { - return 'it was null!'; - } else { - return 'it was not null!'; - } - }, function(err, committed, snapshot) { - expect(err).to.equal(null); - expect(committed).to.equal(true); - expect(snapshot.val()).to.deep.equal('it was null!'); - done(); + it("transaction() doesn't pick up cached data from 
previous transaction.", function( + done + ) { + const refPair = getRandomNode(2) as Reference[]; + const me = refPair[0], + other = refPair[1]; + me.transaction( + function() { + return 'not null'; + }, + function(err, committed) { + expect(err).to.equal(null); + expect(committed).to.equal(true); + other.set(null, function() { + me.transaction( + function(snapshot) { + if (snapshot === null) { + return 'it was null!'; + } else { + return 'it was not null!'; + } + }, + function(err, committed, snapshot) { + expect(err).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot.val()).to.deep.equal('it was null!'); + done(); + } + ); }); - }); - }); + } + ); }); - it("server values: local timestamp should eventually (but not immediately) match the server with txns", function(done) { - const refPair = (getRandomNode(2) as Reference[]), - writer = refPair[0], - reader = refPair[1], - readSnaps = [], writeSnaps = []; + it('server values: local timestamp should eventually (but not immediately) match the server with txns', function( + done + ) { + const refPair = getRandomNode(2) as Reference[], + writer = refPair[0], + reader = refPair[1], + readSnaps = [], + writeSnaps = []; const evaluateCompletionCriteria = function() { if (readSnaps.length === 1 && writeSnaps.length === 2) { - expect(Math.abs(new Date().getTime() - writeSnaps[0].val()) < 10000).to.equal(true); - expect(Math.abs(new Date().getTime() - writeSnaps[0].getPriority()) < 10000).to.equal(true); - expect(Math.abs(new Date().getTime() - writeSnaps[1].val()) < 10000).to.equal(true); - expect(Math.abs(new Date().getTime() - writeSnaps[1].getPriority()) < 10000).to.equal(true); + expect( + Math.abs(new Date().getTime() - writeSnaps[0].val()) < 10000 + ).to.equal(true); + expect( + Math.abs(new Date().getTime() - writeSnaps[0].getPriority()) < 10000 + ).to.equal(true); + expect( + Math.abs(new Date().getTime() - writeSnaps[1].val()) < 10000 + ).to.equal(true); + expect( + Math.abs(new Date().getTime() 
- writeSnaps[1].getPriority()) < 10000 + ).to.equal(true); expect(writeSnaps[0].val() === writeSnaps[1].val()).to.equal(false); - expect(writeSnaps[0].getPriority() === writeSnaps[1].getPriority()).to.equal(false); + expect( + writeSnaps[0].getPriority() === writeSnaps[1].getPriority() + ).to.equal(false); expect(writeSnaps[1].val() === readSnaps[0].val()).to.equal(true); - expect(writeSnaps[1].getPriority() === readSnaps[0].getPriority()).to.equal(true); + expect( + writeSnaps[1].getPriority() === readSnaps[0].getPriority() + ).to.equal(true); done(); } }; @@ -1113,8 +1294,8 @@ describe('Transaction Tests', function() { writer.transaction(function(current) { return { - '.value' : firebase.database.ServerValue.TIMESTAMP, - '.priority' : firebase.database.ServerValue.TIMESTAMP + '.value': firebase.database.ServerValue.TIMESTAMP, + '.priority': firebase.database.ServerValue.TIMESTAMP }; }); @@ -1122,47 +1303,58 @@ describe('Transaction Tests', function() { }); it("transaction() still works when there's a query listen.", function(done) { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; - ref.set({ - a: 1, - b: 2 - }, function() { - ref.limitToFirst(1).on('child_added', function() {}); + ref.set( + { + a: 1, + b: 2 + }, + function() { + ref.limitToFirst(1).on('child_added', function() {}); - ref.child('a').transaction(function(current) { - return current; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - if (!error) { - expect(snapshot.val()).to.deep.equal(1); - } - done(); - }, false); - }); + ref.child('a').transaction( + function(current) { + return current; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + if (!error) { + expect(snapshot.val()).to.deep.equal(1); + } + done(); + }, + false + ); + } + ); }); - it("transaction() on queried location doesn't run initially on null (firebase-worker-queue 
depends on this).", - function(done) { - const ref = (getRandomNode() as Reference); - ref.push({ a: 1, b: 2}, function() { + it("transaction() on queried location doesn't run initially on null (firebase-worker-queue depends on this).", function( + done + ) { + const ref = getRandomNode() as Reference; + ref.push({ a: 1, b: 2 }, function() { ref.startAt().limitToFirst(1).on('child_added', function(snap) { - snap.ref.transaction(function(current) { - expect(current).to.deep.equal({a: 1, b: 2}); - return null; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - expect(snapshot.val()).to.equal(null); - done(); - }); + snap.ref.transaction( + function(current) { + expect(current).to.deep.equal({ a: 1, b: 2 }); + return null; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + expect(snapshot.val()).to.equal(null); + done(); + } + ); }); }); }); it('transactions raise correct child_changed events on queries', async function() { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; const value = { foo: { value: 1 } }; const snapshots = []; @@ -1178,12 +1370,16 @@ describe('Transaction Tests', function() { snapshots.push(snapshot); }); - await ref.child('foo').transaction(function(current) { - return {value: 2}; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - }, false); + await ref.child('foo').transaction( + function(current) { + return { value: 2 }; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + }, + false + ); expect(snapshots.length).to.equal(2); const addedSnapshot = snapshots[0]; @@ -1195,49 +1391,63 @@ describe('Transaction Tests', function() { }); it('transactions can use local merges', function(done) { - const ref = (getRandomNode() as Reference); + const ref = 
getRandomNode() as Reference; - ref.update({'foo': 'bar'}); + ref.update({ foo: 'bar' }); - ref.child('foo').transaction(function(current) { - expect(current).to.equal('bar'); - return current; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - done(); - }); + ref.child('foo').transaction( + function(current) { + expect(current).to.equal('bar'); + return current; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + done(); + } + ); }); - it('transactions works with merges without the transaction path', function(done) { - const ref = (getRandomNode() as Reference); + it('transactions works with merges without the transaction path', function( + done + ) { + const ref = getRandomNode() as Reference; - ref.update({'foo': 'bar'}); + ref.update({ foo: 'bar' }); - ref.child('non-foo').transaction(function(current) { - expect(current).to.equal(null); - return current; - }, function(error, committed, snapshot) { - expect(error).to.equal(null); - expect(committed).to.equal(true); - done(); - }); + ref.child('non-foo').transaction( + function(current) { + expect(current).to.equal(null); + return current; + }, + function(error, committed, snapshot) { + expect(error).to.equal(null); + expect(committed).to.equal(true); + done(); + } + ); }); //See https://app.asana.com/0/15566422264127/23303789496881 it('out of order remove writes are handled correctly', function(done) { - const ref = (getRandomNode() as Reference); + const ref = getRandomNode() as Reference; - ref.set({foo: 'bar'}); - ref.transaction(function() { - return 'transaction-1'; - }, function() { }); - ref.transaction(function() { - return 'transaction-2'; - }, function() { }); + ref.set({ foo: 'bar' }); + ref.transaction( + function() { + return 'transaction-1'; + }, + function() {} + ); + ref.transaction( + function() { + return 'transaction-2'; + }, + function() {} + ); // This will trigger 
an abort of the transaction which should not cause the client to crash - ref.update({qux: 'quu' }, function(error) { + ref.update({ qux: 'quu' }, function(error) { expect(error).to.equal(null); done(); }); diff --git a/tests/messaging/browser/array-buffer-to-base64.test.ts b/tests/messaging/browser/array-buffer-to-base64.test.ts index bf368fe67ff..3ffe6d3006f 100644 --- a/tests/messaging/browser/array-buffer-to-base64.test.ts +++ b/tests/messaging/browser/array-buffer-to-base64.test.ts @@ -14,27 +14,102 @@ * limitations under the License. */ -import { expect } from "chai"; +import { expect } from 'chai'; import arrayBufferToBase64 from '../../../src/messaging/helpers/array-buffer-to-base64'; describe('Firebase Messaging > array-buffer-to-base64', function() { it('should return expected value for p256dh example', function() { const buffer = new Uint8Array([ - 4, 181, 98, 240, 48, 62, 75, 119, 193, 227, 154, 69, 250, 216, 53, - 110, 157, 120, 62, 76, 213, 249, 11, 62, 12, 19, 149, 36, 5, 82, 140, - 37, 141, 134, 132, 98, 87, 152, 175, 98, 53, 83, 196, 242, 202, 155, - 19, 173, 157, 216, 45, 147, 20, 12, 151, 160, 147, 159, 205, 219, 75, - 133, 156, 129, 152 + 4, + 181, + 98, + 240, + 48, + 62, + 75, + 119, + 193, + 227, + 154, + 69, + 250, + 216, + 53, + 110, + 157, + 120, + 62, + 76, + 213, + 249, + 11, + 62, + 12, + 19, + 149, + 36, + 5, + 82, + 140, + 37, + 141, + 134, + 132, + 98, + 87, + 152, + 175, + 98, + 53, + 83, + 196, + 242, + 202, + 155, + 19, + 173, + 157, + 216, + 45, + 147, + 20, + 12, + 151, + 160, + 147, + 159, + 205, + 219, + 75, + 133, + 156, + 129, + 152 ]).buffer; - const expectedValue = 'BLVi8DA-S3fB45pF-tg1bp14PkzV-Qs-DBOVJAVSjCWNhoRi' + + const expectedValue = + 'BLVi8DA-S3fB45pF-tg1bp14PkzV-Qs-DBOVJAVSjCWNhoRi' + 'V5ivYjVTxPLKmxOtndgtkxQMl6CTn83bS4WcgZg'; expect(arrayBufferToBase64(buffer)).to.equal(expectedValue); }); it('should return expected value for auth example', function() { const buffer = new Uint8Array([ - 255, 237, 107, 177, 171, 
78, 84, 131, 221, 231, 87, 188, 22, - 232, 71, 15 + 255, + 237, + 107, + 177, + 171, + 78, + 84, + 131, + 221, + 231, + 87, + 188, + 22, + 232, + 71, + 15 ]).buffer; const expectedValue = '_-1rsatOVIPd51e8FuhHDw'; expect(arrayBufferToBase64(buffer)).to.equal(expectedValue); diff --git a/tests/messaging/browser/constructor.test.ts b/tests/messaging/browser/constructor.test.ts index e6787cc6b4c..71f5dc1f5b9 100644 --- a/tests/messaging/browser/constructor.test.ts +++ b/tests/messaging/browser/constructor.test.ts @@ -14,12 +14,12 @@ * limitations under the License. */ -import { assert } from "chai"; +import { assert } from 'chai'; import makeFakeApp from './make-fake-app'; -import Errors from '../../../src/messaging/models/errors' -import WindowController from '../../../src/messaging/controllers/window-controller' -import SWController from '../../../src/messaging/controllers/sw-controller' +import Errors from '../../../src/messaging/models/errors'; +import WindowController from '../../../src/messaging/controllers/window-controller'; +import SWController from '../../../src/messaging/controllers/sw-controller'; describe('Firebase Messaging > new *Controller()', function() { it('should handle bad input', function() { @@ -44,8 +44,10 @@ describe('Firebase Messaging > new *Controller()', function() { new WindowController(badInput); new SWController(badInput); - console.warn('Bad Input should have thrown: ', - JSON.stringify(badInput)); + console.warn( + 'Bad Input should have thrown: ', + JSON.stringify(badInput) + ); } catch (err) { caughtError = err; } @@ -59,5 +61,5 @@ describe('Firebase Messaging > new *Controller()', function() { }); new WindowController(app); new SWController(app); - }) + }); }); diff --git a/tests/messaging/browser/db-helper.ts b/tests/messaging/browser/db-helper.ts index a85c23343f3..38bf68b2273 100644 --- a/tests/messaging/browser/db-helper.ts +++ b/tests/messaging/browser/db-helper.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ export default { - deleteDb: (dbName) => { + deleteDb: dbName => { return new Promise((resolve, reject) => { const request = indexedDB.deleteDatabase(dbName); request.onerror = event => { @@ -28,4 +28,4 @@ export default { }; }); } -}; \ No newline at end of file +}; diff --git a/tests/messaging/browser/db-token-manager.ts b/tests/messaging/browser/db-token-manager.ts index 55f353c5b82..e3667b66bfb 100644 --- a/tests/messaging/browser/db-token-manager.ts +++ b/tests/messaging/browser/db-token-manager.ts @@ -26,10 +26,10 @@ export default { return tokenManager.closeDatabase(); }, getTokenDetailsFromDB: () => { - return tokenManager.openDatabase_() - .then(db => { + return tokenManager.openDatabase_().then(db => { return new Promise((resolve, reject) => { - const objectStore = db.transaction([FCM_TOKEN_OBJ_STORE]) + const objectStore = db + .transaction([FCM_TOKEN_OBJ_STORE]) .objectStore(FCM_TOKEN_OBJ_STORE); const allDetails = []; @@ -51,8 +51,7 @@ export default { }, addObjectToIndexDB: object => { - return tokenManager.openDatabase_() - .then(db => { + return tokenManager.openDatabase_().then(db => { return new Promise((resolve, reject) => { const transaction = db.transaction([FCM_TOKEN_OBJ_STORE], 'readwrite'); const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); @@ -68,8 +67,7 @@ export default { }, updateObjectInIndexDb: object => { - return tokenManager.openDatabase_() - .then(db => { + return tokenManager.openDatabase_().then(db => { return new Promise((resolve, reject) => { const transaction = db.transaction([FCM_TOKEN_OBJ_STORE], 'readwrite'); const objectStore = transaction.objectStore(FCM_TOKEN_OBJ_STORE); @@ -98,4 +96,4 @@ export default { }; }); } -}; \ No newline at end of file +}; diff --git a/tests/messaging/browser/delete-token.test.ts b/tests/messaging/browser/delete-token.test.ts index a3e81e75651..9b095868d05 100644 --- a/tests/messaging/browser/delete-token.test.ts +++ b/tests/messaging/browser/delete-token.test.ts @@ -13,7 
+13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { assert } from "chai"; +import { assert } from 'chai'; import * as sinon from 'sinon'; import makeFakeApp from './make-fake-app'; import makeFakeSWReg from './make-fake-sw-reg'; @@ -35,10 +35,7 @@ describe('Firebase Messaging > *Controller.deleteToken()', function() { p256dh: '123456789098765642421' }; - const servicesToTest = [ - WindowController, - SWController - ]; + const servicesToTest = [WindowController, SWController]; const app = makeFakeApp({ messagingSenderId: EXAMPLE_TOKEN_SAVE.fcmSenderId @@ -46,7 +43,10 @@ describe('Firebase Messaging > *Controller.deleteToken()', function() { const configureRegistrationMocks = fakeReg => { servicesToTest.forEach(serviceClass => { - const controllerStub = sinon.stub(serviceClass.prototype, 'getSWRegistration_'); + const controllerStub = sinon.stub( + serviceClass.prototype, + 'getSWRegistration_' + ); controllerStub.callsFake(() => { return fakeReg; }); @@ -71,14 +71,12 @@ describe('Firebase Messaging > *Controller.deleteToken()', function() { }); afterEach(function() { - stubs.forEach((stub) => { + stubs.forEach(stub => { stub.restore(); - }) + }); stubs = []; - const deletePromises = [ - dbTMHelper.closeDatabase() - ]; + const deletePromises = [dbTMHelper.closeDatabase()]; if (globalMessagingService) { deletePromises.push(globalMessagingService.delete()); } @@ -87,19 +85,23 @@ describe('Firebase Messaging > *Controller.deleteToken()', function() { it('should handle no token to delete', function() { globalMessagingService = new WindowController(app); - return globalMessagingService.deleteToken() - .then(() => { - throw new Error('Invalid subscription.'); - }, err => { - assert.equal('messaging/' + Errors.codes.INVALID_DELETE_TOKEN, err.code); - }); + return globalMessagingService.deleteToken().then( + () => { + throw new Error('Invalid subscription.'); + }, + err => { + assert.equal( + 
'messaging/' + Errors.codes.INVALID_DELETE_TOKEN, + err.code + ); + } + ); }); it('should handle no registration', function() { configureRegistrationMocks(Promise.resolve(null)); - return dbTMHelper.addObjectToIndexDB(EXAMPLE_TOKEN_SAVE) - .then(() => { + return dbTMHelper.addObjectToIndexDB(EXAMPLE_TOKEN_SAVE).then(() => { globalMessagingService = new WindowController(app); return globalMessagingService.deleteToken(EXAMPLE_TOKEN_SAVE.fcmToken); }); @@ -113,32 +115,33 @@ describe('Firebase Messaging > *Controller.deleteToken()', function() { dbTMHelper.addObjectToIndexDB(EXAMPLE_TOKEN_SAVE); globalMessagingService = new WindowController(app); - return globalMessagingService.deleteToken(EXAMPLE_TOKEN_SAVE.fcmToken) - .then(() => { - throw new Error('Expected this to reject'); - }, err => { - assert.equal('Unknown error', err.message); - }); + return globalMessagingService.deleteToken(EXAMPLE_TOKEN_SAVE.fcmToken).then( + () => { + throw new Error('Expected this to reject'); + }, + err => { + assert.equal('Unknown error', err.message); + } + ); }); it('should handle null getSubscription()', function() { - configureRegistrationMocks( - generateFakeReg(Promise.resolve(null)) - ); + configureRegistrationMocks(generateFakeReg(Promise.resolve(null))); let serviceInstance; return servicesToTest.reduce((chain, serviceClass) => { - return chain.then(() => { - return dbTMHelper.addObjectToIndexDB(EXAMPLE_TOKEN_SAVE); - }) - .then(() => { - serviceInstance = new serviceClass(app); - return serviceInstance.deleteToken(EXAMPLE_TOKEN_SAVE.fcmToken); - }) - .then(() => { - return serviceInstance.delete(); - }); + return chain + .then(() => { + return dbTMHelper.addObjectToIndexDB(EXAMPLE_TOKEN_SAVE); + }) + .then(() => { + serviceInstance = new serviceClass(app); + return serviceInstance.deleteToken(EXAMPLE_TOKEN_SAVE.fcmToken); + }) + .then(() => { + return serviceInstance.delete(); + }); }, Promise.resolve()); }); @@ -156,21 +159,24 @@ describe('Firebase Messaging > 
*Controller.deleteToken()', function() { let serviceInstance; return servicesToTest.reduce((chain, serviceClass) => { - return chain.then(() => { - return dbTMHelper.addObjectToIndexDB(EXAMPLE_TOKEN_SAVE); - }) - .then(() => { - serviceInstance = new serviceClass(app); - return serviceInstance.deleteToken(EXAMPLE_TOKEN_SAVE.fcmToken) + return chain .then(() => { - throw new Error('Expected this to reject'); - }, err => { - assert.equal(errorMsg, err.message); + return dbTMHelper.addObjectToIndexDB(EXAMPLE_TOKEN_SAVE); + }) + .then(() => { + serviceInstance = new serviceClass(app); + return serviceInstance.deleteToken(EXAMPLE_TOKEN_SAVE.fcmToken).then( + () => { + throw new Error('Expected this to reject'); + }, + err => { + assert.equal(errorMsg, err.message); + } + ); + }) + .then(() => { + return serviceInstance.delete(); }); - }) - .then(() => { - return serviceInstance.delete(); - }); }, Promise.resolve()); }); @@ -187,16 +193,17 @@ describe('Firebase Messaging > *Controller.deleteToken()', function() { let serviceInstance; return servicesToTest.reduce((chain, serviceClass) => { - return chain.then(() => { - return dbTMHelper.addObjectToIndexDB(EXAMPLE_TOKEN_SAVE); - }) - .then(() => { - serviceInstance = new serviceClass(app); - return serviceInstance.deleteToken(EXAMPLE_TOKEN_SAVE.fcmToken); - }) - .then(() => { - return serviceInstance.delete(); - }); + return chain + .then(() => { + return dbTMHelper.addObjectToIndexDB(EXAMPLE_TOKEN_SAVE); + }) + .then(() => { + serviceInstance = new serviceClass(app); + return serviceInstance.deleteToken(EXAMPLE_TOKEN_SAVE.fcmToken); + }) + .then(() => { + return serviceInstance.delete(); + }); }, Promise.resolve()); }); }); diff --git a/tests/messaging/browser/get-sw-reg.test.ts b/tests/messaging/browser/get-sw-reg.test.ts index a0b0b884551..aea856c46bb 100644 --- a/tests/messaging/browser/get-sw-reg.test.ts +++ b/tests/messaging/browser/get-sw-reg.test.ts @@ -13,7 +13,7 @@ * See the License for the specific language 
governing permissions and * limitations under the License. */ -import { assert } from "chai"; +import { assert } from 'chai'; import * as sinon from 'sinon'; import makeFakeApp from './make-fake-app'; import makeFakeSWReg from './make-fake-sw-reg'; @@ -30,27 +30,23 @@ const app = makeFakeApp({ }); const mockWindowRegistration = registration => { - const regStub = sinon.stub( - navigator.serviceWorker, 'register'); + const regStub = sinon.stub(navigator.serviceWorker, 'register'); regStub.callsFake(() => Promise.resolve(registration)); stubs.push(regStub); }; describe('Firebase Messaging > *Controller.getSWReg_()', function() { afterEach(function() { - stubs.forEach((stub) => { + stubs.forEach(stub => { stub.restore(); }); }); it('should get sw reg in window', function() { let updateCalled = false; - const activatedRegistration = makeFakeSWReg( - 'active', - { - state: 'activated' - } - ); + const activatedRegistration = makeFakeSWReg('active', { + state: 'activated' + }); activatedRegistration.update = () => { updateCalled = true; }; @@ -58,18 +54,19 @@ describe('Firebase Messaging > *Controller.getSWReg_()', function() { mockWindowRegistration(activatedRegistration); const messagingService = new WindowController(app); - return messagingService.getSWRegistration_() - .then(registration => { - assert.equal(registration, activatedRegistration); - assert.equal(updateCalled, true); - }) - .then(() => { - // Check a second call returns the already registered registration - return messagingService.getSWRegistration_(); - }) - .then(registration => { - assert.equal(registration, activatedRegistration); - }); + return messagingService + .getSWRegistration_() + .then(registration => { + assert.equal(registration, activatedRegistration); + assert.equal(updateCalled, true); + }) + .then(() => { + // Check a second call returns the already registered registration + return messagingService.getSWRegistration_(); + }) + .then(registration => { + assert.equal(registration, 
activatedRegistration); + }); }); it('should handle no sw reg in page', function() { @@ -77,12 +74,14 @@ describe('Firebase Messaging > *Controller.getSWReg_()', function() { mockWindowRegistration(fakeReg); const messagingService = new WindowController(app); - return messagingService.getSWRegistration_() - .then(() => { - throw new Error('Expected this error to throw due to no SW.'); - }, err => { - assert.equal('messaging/' + Errors.codes.NO_SW_IN_REG, err.code); - }); + return messagingService.getSWRegistration_().then( + () => { + throw new Error('Expected this error to throw due to no SW.'); + }, + err => { + assert.equal('messaging/' + Errors.codes.NO_SW_IN_REG, err.code); + } + ); }); it('should get sw reg in sw', function() { @@ -90,49 +89,55 @@ describe('Firebase Messaging > *Controller.getSWReg_()', function() { (self as any).registration = fakeReg; const messagingService = new SWController(app); - return messagingService.getSWRegistration_() - .then(registration => { - assert.equal(fakeReg, registration); - }) - .then(() => { - // Check a second call returns the already registered registration - return messagingService.getSWRegistration_(); - }) - .then(registration => { - assert.equal(registration, fakeReg); - }); + return messagingService + .getSWRegistration_() + .then(registration => { + assert.equal(fakeReg, registration); + }) + .then(() => { + // Check a second call returns the already registered registration + return messagingService.getSWRegistration_(); + }) + .then(registration => { + assert.equal(registration, fakeReg); + }); }); it('should make registration error available to developer', function() { const errorMsg = 'test-reg-error-1234567890'; const mockRegisterMethod = sinon.stub(navigator.serviceWorker, 'register'); - mockRegisterMethod.callsFake(() =>Promise.reject(new Error(errorMsg))); + mockRegisterMethod.callsFake(() => Promise.reject(new Error(errorMsg))); const messagingService = new WindowController(app); - return 
messagingService.getSWRegistration_() - .then(() => { - throw new Error('Expect getSWRegistration_ to reject.'); - }, error => { - assert.equal('messaging/' + Errors.codes.FAILED_DEFAULT_REGISTRATION, - error.code); - assert.equal((error.message.indexOf(errorMsg) !== -1), true); - }); + return messagingService.getSWRegistration_().then( + () => { + throw new Error('Expect getSWRegistration_ to reject.'); + }, + error => { + assert.equal( + 'messaging/' + Errors.codes.FAILED_DEFAULT_REGISTRATION, + error.code + ); + assert.equal(error.message.indexOf(errorMsg) !== -1, true); + } + ); }); it('should test redundant edge case', function() { - const redundantRegistration = makeFakeSWReg( - 'installing', - { state: 'redundant'} - ); + const redundantRegistration = makeFakeSWReg('installing', { + state: 'redundant' + }); mockWindowRegistration(redundantRegistration); const messagingService = new WindowController(app); - return messagingService.getSWRegistration_() - .then(() => { - throw new Error('Should throw error due to redundant SW'); - }, err => { - assert.equal('messaging/' + Errors.codes.SW_REG_REDUNDANT, err.code); - }); + return messagingService.getSWRegistration_().then( + () => { + throw new Error('Should throw error due to redundant SW'); + }, + err => { + assert.equal('messaging/' + Errors.codes.SW_REG_REDUNDANT, err.code); + } + ); }); it('should handle installed to redundant edge case', function() { @@ -147,19 +152,18 @@ describe('Firebase Messaging > *Controller.getSWReg_()', function() { } }; - const slowRedundantRegistration = makeFakeSWReg( - 'installing', - swValue - ); + const slowRedundantRegistration = makeFakeSWReg('installing', swValue); mockWindowRegistration(slowRedundantRegistration); const messagingService = new WindowController(app); - return messagingService.getSWRegistration_() - .then(() => { + return messagingService.getSWRegistration_().then( + () => { throw new Error('Should throw error due to redundant SW'); - }, err => { + }, + err 
=> { assert.equal('messaging/' + Errors.codes.SW_REG_REDUNDANT, err.code); - }); + } + ); }); it('should handle waiting to redundant edge case', function() { @@ -174,18 +178,17 @@ describe('Firebase Messaging > *Controller.getSWReg_()', function() { } }; - const slowRedundantRegistration = makeFakeSWReg( - 'waiting', - swValue - ); + const slowRedundantRegistration = makeFakeSWReg('waiting', swValue); mockWindowRegistration(slowRedundantRegistration); const messagingService = new WindowController(app); - return messagingService.getSWRegistration_() - .then(() => { + return messagingService.getSWRegistration_().then( + () => { throw new Error('Should throw error due to redundant SW'); - }, err => { + }, + err => { assert.equal('messaging/' + Errors.codes.SW_REG_REDUNDANT, err.code); - }); + } + ); }); }); diff --git a/tests/messaging/browser/get-token.test.ts b/tests/messaging/browser/get-token.test.ts index 850ffa48177..aba8a6c4d0e 100644 --- a/tests/messaging/browser/get-token.test.ts +++ b/tests/messaging/browser/get-token.test.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { assert } from "chai"; +import { assert } from 'chai'; import * as sinon from 'sinon'; import makeFakeApp from './make-fake-app'; import makeFakeSWReg from './make-fake-sw-reg'; @@ -32,24 +32,22 @@ describe('Firebase Messaging > *Controller.getToken()', function() { messagingSenderId: EXAMPLE_SENDER_ID }); - const servicesToTest = [ - WindowController, - SWController - ]; + const servicesToTest = [WindowController, SWController]; const mockGetReg = fakeReg => { - servicesToTest.forEach(serviceClass => { - const getSwMock = sinon.stub(serviceClass.prototype, 'getSWRegistration_'); - getSwMock.callsFake(() => fakeReg); - stubs.push(getSwMock); - }); -}; + servicesToTest.forEach(serviceClass => { + const getSwMock = sinon.stub( + serviceClass.prototype, + 'getSWRegistration_' + ); + getSwMock.callsFake(() => fakeReg); + stubs.push(getSwMock); + }); + }; let stubs = []; - beforeEach(function() { - - }); + beforeEach(function() {}); afterEach(function() { stubs.forEach(stub => stub.restore()); @@ -57,21 +55,29 @@ describe('Firebase Messaging > *Controller.getToken()', function() { }); it('should throw on unsupported browsers', function() { - const isSupportedStub = sinon.stub(WindowController.prototype, 'isSupported_'); + const isSupportedStub = sinon.stub( + WindowController.prototype, + 'isSupported_' + ); isSupportedStub.callsFake(() => false); stubs.push(isSupportedStub); const messagingService = new WindowController(app); - return messagingService.getToken() - .then(() => { - throw new Error('Expected getToken to throw '); - }, err => { - assert.equal('messaging/' + Errors.codes.UNSUPPORTED_BROWSER, err.code); - }); + return messagingService.getToken().then( + () => { + throw new Error('Expected getToken to throw '); + }, + err => { + assert.equal('messaging/' + Errors.codes.UNSUPPORTED_BROWSER, err.code); + } + ); }); it('should handle a failure to get registration', function() { - const notificationStub = sinon.stub(ControllerInterface.prototype, 
'getNotificationPermission_'); + const notificationStub = sinon.stub( + ControllerInterface.prototype, + 'getNotificationPermission_' + ); notificationStub.callsFake(() => NotificationPermission.granted); stubs.push(notificationStub); @@ -80,20 +86,29 @@ describe('Firebase Messaging > *Controller.getToken()', function() { stubs.push(registerStub); const messagingService = new WindowController(app); - return messagingService.getToken() - .then(() => { - throw new Error('Expected getToken to throw '); - }, err => { - assert.equal('messaging/' + Errors.codes.FAILED_DEFAULT_REGISTRATION, - err.code); - }) - .then(() => { - messagingService.delete(); - }); + return messagingService + .getToken() + .then( + () => { + throw new Error('Expected getToken to throw '); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.FAILED_DEFAULT_REGISTRATION, + err.code + ); + } + ) + .then(() => { + messagingService.delete(); + }); }); it('should handle the notification permission', function() { - const notificationStub = sinon.stub(ControllerInterface.prototype, 'getNotificationPermission_'); + const notificationStub = sinon.stub( + ControllerInterface.prototype, + 'getNotificationPermission_' + ); notificationStub.onCall(0).returns(NotificationPermission.denied); notificationStub.onCall(1).returns(NotificationPermission.default); notificationStub.onCall(2).returns(NotificationPermission.denied); @@ -102,28 +117,37 @@ describe('Firebase Messaging > *Controller.getToken()', function() { return servicesToTest.reduce((chain, ServiceClass) => { const serviceInstance = new ServiceClass(app); - return chain.then(() => { - return serviceInstance.getToken(); - }) - .then(() => { - throw new Error('Expected getToken to throw '); - }, err => { - assert.equal('messaging/' + Errors.codes.NOTIFICATIONS_BLOCKED, - err.code); - }) - .then(() => { - return serviceInstance.getToken(); - }) - .then(token => { - assert.equal(null, token); - }); + return chain + .then(() => { + return 
serviceInstance.getToken(); + }) + .then( + () => { + throw new Error('Expected getToken to throw '); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.NOTIFICATIONS_BLOCKED, + err.code + ); + } + ) + .then(() => { + return serviceInstance.getToken(); + }) + .then(token => { + assert.equal(null, token); + }); }, Promise.resolve()); }); it('should get saved token', function() { const registration = makeFakeSWReg(); - const notificationStub = sinon.stub(ControllerInterface.prototype, 'getNotificationPermission_'); + const notificationStub = sinon.stub( + ControllerInterface.prototype, + 'getNotificationPermission_' + ); notificationStub.callsFake(() => NotificationPermission.granted); stubs.push(notificationStub); @@ -136,8 +160,7 @@ describe('Firebase Messaging > *Controller.getToken()', function() { return Promise.all( servicesToTest.map(ServiceClass => { const serviceInstance = new ServiceClass(app); - return serviceInstance.getToken() - .then(token => { + return serviceInstance.getToken().then(token => { assert.equal(EXAMPLE_FCM_TOKEN, token); }); }) @@ -148,7 +171,9 @@ describe('Firebase Messaging > *Controller.getToken()', function() { const registration = makeFakeSWReg(); const notificationStub = sinon.stub( - ControllerInterface.prototype, 'getNotificationPermission_'); + ControllerInterface.prototype, + 'getNotificationPermission_' + ); notificationStub.callsFake(() => NotificationPermission.granted); stubs.push(notificationStub); @@ -165,12 +190,10 @@ describe('Firebase Messaging > *Controller.getToken()', function() { return Promise.all( servicesToTest.map(ServiceClass => { const serviceInstance = new ServiceClass(app); - return serviceInstance.getToken() - .then(token => { + return serviceInstance.getToken().then(token => { assert.equal(EXAMPLE_FCM_TOKEN, token); }); }) ); }); - }); diff --git a/tests/messaging/browser/make-fake-app.ts b/tests/messaging/browser/make-fake-app.ts index 903b4ab10bf..fb7b3eb73f5 100644 --- 
a/tests/messaging/browser/make-fake-app.ts +++ b/tests/messaging/browser/make-fake-app.ts @@ -15,7 +15,7 @@ */ export default (options = {}) => { window['firebase'] = window['firebase'] || {}; - let app = /** @type {!firebase.app.App} */ ({}); + let app /** @type {!firebase.app.App} */ = {}; (app as any).INTERNAL = window['firebase'].INTERNAL; (app as any).options = options; return app; diff --git a/tests/messaging/browser/make-fake-subscription.ts b/tests/messaging/browser/make-fake-subscription.ts index 0bfcee9a95c..6d5e603a6bd 100644 --- a/tests/messaging/browser/make-fake-subscription.ts +++ b/tests/messaging/browser/make-fake-subscription.ts @@ -24,7 +24,7 @@ FakeSubscription.prototype = PushSubscription.prototype; * @param {string} string String to convert to ArrayBuffer. * @return {ArrayBuffer} ArrayBuffer containing bytes from supplied string. */ -const stringToArrayBuffer = (string) => { +const stringToArrayBuffer = string => { // String char codes are 16 bits (See MDN). const arrayBuffer = new ArrayBuffer(string.length * 2); const bufferView = new Uint16Array(arrayBuffer); @@ -43,9 +43,9 @@ export default function(options = {}) { const fakeSub = new FakeSubscription(); // Set endpoint - const endpoint = (options as any).endpoint ? - (options as any).endpoint : - 'https://example-push-endpoint.com/'; + const endpoint = (options as any).endpoint + ? (options as any).endpoint + : 'https://example-push-endpoint.com/'; Object.defineProperty(fakeSub, 'endpoint', { value: endpoint @@ -53,24 +53,31 @@ export default function(options = {}) { // Set getKey Object.defineProperty(fakeSub, 'getKey', { - value: (keyName) => { + value: keyName => { let keyString = null; - switch(keyName) { + switch (keyName) { case 'auth': { - keyString = (options as any).auth ? (options as any).auth : 'auth-secret'; + keyString = (options as any).auth + ? (options as any).auth + : 'auth-secret'; break; } case 'p256dh': { - keyString = (options as any).p256dh ? 
(options as any).p256dh : 'the-user-public-key'; + keyString = (options as any).p256dh + ? (options as any).p256dh + : 'the-user-public-key'; break; } default: - throw new Error('Error from MakeFakeSubscription, unexpected ' + - 'getKey() key name: ' + keyName); + throw new Error( + 'Error from MakeFakeSubscription, unexpected ' + + 'getKey() key name: ' + + keyName + ); } return stringToArrayBuffer(keyString); } }); return fakeSub; -}; \ No newline at end of file +} diff --git a/tests/messaging/browser/make-fake-sw-reg.ts b/tests/messaging/browser/make-fake-sw-reg.ts index 3a8342fcc86..a18513df42d 100644 --- a/tests/messaging/browser/make-fake-sw-reg.ts +++ b/tests/messaging/browser/make-fake-sw-reg.ts @@ -30,4 +30,4 @@ export default function(selectedState?, desiredValue?) { }); return fakeReg; -}; \ No newline at end of file +} diff --git a/tests/messaging/browser/token-details-model-delete.test.ts b/tests/messaging/browser/token-details-model-delete.test.ts index f3a6446d1e1..e2d5f8d80e3 100644 --- a/tests/messaging/browser/token-details-model-delete.test.ts +++ b/tests/messaging/browser/token-details-model-delete.test.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { assert } from "chai"; +import { assert } from 'chai'; import makeFakeSubscription from './make-fake-subscription'; import dbHelpers from './db-helper'; import Errors from '../../../src/messaging/models/errors'; @@ -23,12 +23,13 @@ import arrayBufferToBase64 from '../../../src/messaging/helpers/array-buffer-to- describe('Firebase Messaging > TokenDetailsModel.deleteToken()', function() { const EXAMPLE_INPUT = { swScope: '/example-scope', - vapidKey: 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + - '4emEWVURkCF8fUTHEFe2xrEgTt5ilh5xD94v0pFe_I', + vapidKey: + 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + + '4emEWVURkCF8fUTHEFe2xrEgTt5ilh5xD94v0pFe_I', subscription: makeFakeSubscription(), fcmSenderId: '1234567', fcmToken: 'qwerty', - fcmPushSet: '7654321', + fcmPushSet: '7654321' }; let tokenModel; @@ -51,71 +52,82 @@ describe('Firebase Messaging > TokenDetailsModel.deleteToken()', function() { it('should handle no input', function() { tokenModel = new TokenDetailsModel(); - return tokenModel.deleteToken() - .then(() => { - throw new Error('Expected this to throw an error due to no token'); - }, err => { - assert.equal('messaging/' + Errors.codes.INVALID_DELETE_TOKEN, - err.code); - }); + return tokenModel.deleteToken().then( + () => { + throw new Error('Expected this to throw an error due to no token'); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.INVALID_DELETE_TOKEN, + err.code + ); + } + ); }); it('should handle empty string', function() { tokenModel = new TokenDetailsModel(); - return tokenModel.deleteToken('') - .then(() => { - throw new Error('Expected this to throw an error due to no token'); - }, err => { - assert.equal('messaging/' + Errors.codes.INVALID_DELETE_TOKEN, - err.code); - }); + return tokenModel.deleteToken('').then( + () => { + throw new Error('Expected this to throw an error due to no token'); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.INVALID_DELETE_TOKEN, + err.code + ); + } + ); }); 
it('should delete current token', function() { tokenModel = new TokenDetailsModel(); - return tokenModel.saveTokenDetails(EXAMPLE_INPUT) - .then(() => { - return tokenModel.deleteToken(EXAMPLE_INPUT.fcmToken); - }) - .then(details => { - const subscriptionKeys = [ - 'endpoint', - 'auth', - 'p256dh' - ]; - const subscriptionValues = { - endpoint: EXAMPLE_INPUT.subscription.endpoint, - auth: arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('auth')), - p256dh: arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('p256dh')) - }; + return tokenModel + .saveTokenDetails(EXAMPLE_INPUT) + .then(() => { + return tokenModel.deleteToken(EXAMPLE_INPUT.fcmToken); + }) + .then(details => { + const subscriptionKeys = ['endpoint', 'auth', 'p256dh']; + const subscriptionValues = { + endpoint: EXAMPLE_INPUT.subscription.endpoint, + auth: arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('auth')), + p256dh: arrayBufferToBase64( + EXAMPLE_INPUT.subscription.getKey('p256dh') + ) + }; - subscriptionKeys.forEach((keyName) => { - assert.equal(details[keyName], subscriptionValues[keyName]); - }); + subscriptionKeys.forEach(keyName => { + assert.equal(details[keyName], subscriptionValues[keyName]); + }); - Object.keys(details).forEach((keyName) => { - if (subscriptionKeys.indexOf(keyName) !== -1) { - return; - } + Object.keys(details).forEach(keyName => { + if (subscriptionKeys.indexOf(keyName) !== -1) { + return; + } - assert.equal(details[keyName], EXAMPLE_INPUT[keyName]); - }); + assert.equal(details[keyName], EXAMPLE_INPUT[keyName]); + }); - return tokenModel.getTokenDetailsFromToken(EXAMPLE_INPUT.fcmToken); - }) - .then(tokenDetails => { - assert.equal(null, tokenDetails); - }); + return tokenModel.getTokenDetailsFromToken(EXAMPLE_INPUT.fcmToken); + }) + .then(tokenDetails => { + assert.equal(null, tokenDetails); + }); }); it('should handle deleting a non-existant token', function() { tokenModel = new TokenDetailsModel(); - return tokenModel.deleteToken('bad-token') - 
.then(() => { - throw new Error('Expected this delete to throw and error.'); - }, err => { - assert.equal('messaging/' + Errors.codes.DELETE_TOKEN_NOT_FOUND, - err.code); - }); + return tokenModel.deleteToken('bad-token').then( + () => { + throw new Error('Expected this delete to throw and error.'); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.DELETE_TOKEN_NOT_FOUND, + err.code + ); + } + ); }); -}); \ No newline at end of file +}); diff --git a/tests/messaging/browser/token-details-model-get.test.ts b/tests/messaging/browser/token-details-model-get.test.ts index 5c21f0daca1..84252214460 100644 --- a/tests/messaging/browser/token-details-model-get.test.ts +++ b/tests/messaging/browser/token-details-model-get.test.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { assert } from "chai"; +import { assert } from 'chai'; import makeFakeSubscription from './make-fake-subscription'; import dbHelpers from './db-helper'; import Errors from '../../../src/messaging/models/errors'; @@ -23,12 +23,13 @@ import arrayBufferToBase64 from '../../../src/messaging/helpers/array-buffer-to- describe('Firebase Messaging > TokenDetailsModel.getToken()', function() { const EXAMPLE_INPUT = { swScope: '/example-scope', - vapidKey: 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + + vapidKey: + 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + '4emEWVURkCF8fUTHEFe2xrEgTt5ilh5xD94v0pFe_I', subscription: makeFakeSubscription(), fcmSenderId: '1234567', fcmToken: 'qwerty', - fcmPushSet: '7654321', + fcmPushSet: '7654321' }; let tokenModel; @@ -50,46 +51,34 @@ describe('Firebase Messaging > TokenDetailsModel.getToken()', function() { }); it('should throw on bad scope input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; - const promises = badInputs.map((badInput) => { + const badInputs = ['', [], {}, true, null, 123]; + const promises = badInputs.map(badInput => 
{ tokenModel = new TokenDetailsModel(); - return tokenModel.getTokenDetailsFromSWScope(badInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_SCOPE, - err.code); - }); + return tokenModel.getTokenDetailsFromSWScope(badInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_SCOPE, err.code); + } + ); }); return Promise.all(promises); }); it('should throw on bad FCM Token input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; - const promises = badInputs.map((badInput) => { + const badInputs = ['', [], {}, true, null, 123]; + const promises = badInputs.map(badInput => { tokenModel = new TokenDetailsModel(); - return tokenModel.getTokenDetailsFromToken(badInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_TOKEN, - err.code); - }); + return tokenModel.getTokenDetailsFromToken(badInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_TOKEN, err.code); + } + ); }); return Promise.all(promises); @@ -97,76 +86,73 @@ describe('Firebase Messaging > TokenDetailsModel.getToken()', function() { it('should get from scope', function() { tokenModel = new TokenDetailsModel(); - return tokenModel.getTokenDetailsFromSWScope(EXAMPLE_INPUT.swScope) - .then((details) => { - assert.equal(null, details); - - return tokenModel.saveTokenDetails(EXAMPLE_INPUT); - }) - .then(() => { - return tokenModel.getTokenDetailsFromSWScope(EXAMPLE_INPUT.swScope); - }) - .then((details) => { - const subscriptionKeys = [ - 'endpoint', - 'auth', - 'p256dh' - ]; - const subscriptionValues = { - endpoint: EXAMPLE_INPUT.subscription.endpoint, - auth: arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('auth')), - p256dh: 
arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('p256dh')) - }; - - subscriptionKeys.forEach((keyName) => { - assert.equal(details[keyName], subscriptionValues[keyName]); - }); - - Object.keys(details).forEach((keyName) => { - if (subscriptionKeys.indexOf(keyName) !== -1) { - return; - } + return tokenModel + .getTokenDetailsFromSWScope(EXAMPLE_INPUT.swScope) + .then(details => { + assert.equal(null, details); - assert.equal(details[keyName], EXAMPLE_INPUT[keyName]); + return tokenModel.saveTokenDetails(EXAMPLE_INPUT); + }) + .then(() => { + return tokenModel.getTokenDetailsFromSWScope(EXAMPLE_INPUT.swScope); + }) + .then(details => { + const subscriptionKeys = ['endpoint', 'auth', 'p256dh']; + const subscriptionValues = { + endpoint: EXAMPLE_INPUT.subscription.endpoint, + auth: arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('auth')), + p256dh: arrayBufferToBase64( + EXAMPLE_INPUT.subscription.getKey('p256dh') + ) + }; + + subscriptionKeys.forEach(keyName => { + assert.equal(details[keyName], subscriptionValues[keyName]); + }); + + Object.keys(details).forEach(keyName => { + if (subscriptionKeys.indexOf(keyName) !== -1) { + return; + } + + assert.equal(details[keyName], EXAMPLE_INPUT[keyName]); + }); }); - }); }); it('should get from token', function() { tokenModel = new TokenDetailsModel(); - return tokenModel.getTokenDetailsFromToken(EXAMPLE_INPUT.fcmToken) - .then((details) => { - assert.equal(null, details); - - return tokenModel.saveTokenDetails(EXAMPLE_INPUT); - }) - .then(() => { - return tokenModel.getTokenDetailsFromToken(EXAMPLE_INPUT.fcmToken); - }) - .then((details) => { - const subscriptionKeys = [ - 'endpoint', - 'auth', - 'p256dh' - ]; - const subscriptionValues = { - endpoint: EXAMPLE_INPUT.subscription.endpoint, - auth: arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('auth')), - p256dh: arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('p256dh')) - }; - - subscriptionKeys.forEach((keyName) => { - 
assert.equal(details[keyName], subscriptionValues[keyName]); - }); - - Object.keys(details).forEach((keyName) => { - if (subscriptionKeys.indexOf(keyName) !== -1) { - return; - } + return tokenModel + .getTokenDetailsFromToken(EXAMPLE_INPUT.fcmToken) + .then(details => { + assert.equal(null, details); - assert.equal(details[keyName], EXAMPLE_INPUT[keyName]); + return tokenModel.saveTokenDetails(EXAMPLE_INPUT); + }) + .then(() => { + return tokenModel.getTokenDetailsFromToken(EXAMPLE_INPUT.fcmToken); + }) + .then(details => { + const subscriptionKeys = ['endpoint', 'auth', 'p256dh']; + const subscriptionValues = { + endpoint: EXAMPLE_INPUT.subscription.endpoint, + auth: arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('auth')), + p256dh: arrayBufferToBase64( + EXAMPLE_INPUT.subscription.getKey('p256dh') + ) + }; + + subscriptionKeys.forEach(keyName => { + assert.equal(details[keyName], subscriptionValues[keyName]); + }); + + Object.keys(details).forEach(keyName => { + if (subscriptionKeys.indexOf(keyName) !== -1) { + return; + } + + assert.equal(details[keyName], EXAMPLE_INPUT[keyName]); + }); }); - }); }); - }); diff --git a/tests/messaging/browser/token-details-model-save.test.ts b/tests/messaging/browser/token-details-model-save.test.ts index d6f9b8d4e5f..ad6edcfa2ca 100644 --- a/tests/messaging/browser/token-details-model-save.test.ts +++ b/tests/messaging/browser/token-details-model-save.test.ts @@ -13,22 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { assert } from "chai"; +import { assert } from 'chai'; import makeFakeSubscription from './make-fake-subscription'; import dbHelpers from './db-helper'; import Errors from '../../../src/messaging/models/errors'; import TokenDetailsModel from '../../../src/messaging/models/token-details-model'; describe('Firebase Messaging > TokenDetailsModel.saveToken()', function() { - const EXAMPLE_INPUT = { swScope: '/example-scope', - vapidKey: 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + - '4emEWVURkCF8fUTHEFe2xrEgTt5ilh5xD94v0pFe_I', + vapidKey: + 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + + '4emEWVURkCF8fUTHEFe2xrEgTt5ilh5xD94v0pFe_I', subscription: makeFakeSubscription(), fcmSenderId: '1234567', fcmToken: 'qwerty', - fcmPushSet: '7654321', + fcmPushSet: '7654321' }; let tokenModel; @@ -50,156 +50,120 @@ describe('Firebase Messaging > TokenDetailsModel.saveToken()', function() { }); it('should throw on bad input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; + const badInputs = ['', [], {}, true, null, 123]; const promises = badInputs.map((badInput: any) => { tokenModel = new TokenDetailsModel(); const validInput = Object.assign({}, EXAMPLE_INPUT); validInput.swScope = badInput; - return tokenModel.saveTokenDetails(validInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_SCOPE, - err.code); - }); + return tokenModel.saveTokenDetails(validInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_SCOPE, err.code); + } + ); }); return Promise.all(promises); }); it('should throw on bad vapid key input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; + const badInputs = ['', [], {}, true, null, 123]; const promises = badInputs.map((badInput: any) => { tokenModel = new TokenDetailsModel(); const validInput = Object.assign({}, 
EXAMPLE_INPUT); validInput.vapidKey = badInput; - return tokenModel.saveTokenDetails(validInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_VAPID_KEY, - err.code); - }); + return tokenModel.saveTokenDetails(validInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_VAPID_KEY, err.code); + } + ); }); return Promise.all(promises); }); it('should throw on bad subscription input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; + const badInputs = ['', [], {}, true, null, 123]; const promises = badInputs.map((badInput: any) => { tokenModel = new TokenDetailsModel(); const validInput = Object.assign({}, EXAMPLE_INPUT); validInput.subscription = badInput; - return tokenModel.saveTokenDetails(validInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_SUBSCRIPTION, - err.code); - }); + return tokenModel.saveTokenDetails(validInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_SUBSCRIPTION, err.code); + } + ); }); return Promise.all(promises); }); it('should throw on bad send id input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; + const badInputs = ['', [], {}, true, null, 123]; const promises = badInputs.map((badInput: any) => { tokenModel = new TokenDetailsModel(); const validInput = Object.assign({}, EXAMPLE_INPUT); validInput.fcmSenderId = badInput; - return tokenModel.saveTokenDetails(validInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_SENDER_ID, - err.code); - }); + return tokenModel.saveTokenDetails(validInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => 
{ + assert.equal('messaging/' + Errors.codes.BAD_SENDER_ID, err.code); + } + ); }); return Promise.all(promises); }); it('should throw on bad token input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; + const badInputs = ['', [], {}, true, null, 123]; const promises = badInputs.map((badInput: any) => { tokenModel = new TokenDetailsModel(); const validInput = Object.assign({}, EXAMPLE_INPUT); validInput.fcmToken = badInput; - return tokenModel.saveTokenDetails(validInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_TOKEN, - err.code); - }); + return tokenModel.saveTokenDetails(validInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_TOKEN, err.code); + } + ); }); return Promise.all(promises); }); it('should throw on bad pushset input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; + const badInputs = ['', [], {}, true, null, 123]; const promises = badInputs.map((badInput: any) => { tokenModel = new TokenDetailsModel(); const validInput = Object.assign({}, EXAMPLE_INPUT); validInput.fcmPushSet = badInput; - return tokenModel.saveTokenDetails(validInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_PUSH_SET, - err.code); - }); + return tokenModel.saveTokenDetails(validInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_PUSH_SET, err.code); + } + ); }); return Promise.all(promises); @@ -209,5 +173,4 @@ describe('Firebase Messaging > TokenDetailsModel.saveToken()', function() { tokenModel = new TokenDetailsModel(); return tokenModel.saveTokenDetails(EXAMPLE_INPUT); }); - }); diff --git a/tests/messaging/browser/token-manager-create-token.test.ts 
b/tests/messaging/browser/token-manager-create-token.test.ts index 0fd701ceaff..c8d5870c865 100644 --- a/tests/messaging/browser/token-manager-create-token.test.ts +++ b/tests/messaging/browser/token-manager-create-token.test.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { expect } from "chai"; +import { expect } from 'chai'; import * as sinon from 'sinon'; import makeFakeSWReg from './make-fake-sw-reg'; import dbTMHelper from './db-token-manager'; @@ -22,9 +22,8 @@ import Errors from '../../../src/messaging/models/errors'; import arrayBufferToBase64 from '../../../src/messaging/helpers/array-buffer-to-base64'; describe('Firebase Messaging > tokenManager.createToken()', function() { - - const AUTH_BUFFER = new Uint8Array([1,2,3]); - const P256DH_BUFFER = new Uint8Array([1,2,3]); + const AUTH_BUFFER = new Uint8Array([1, 2, 3]); + const P256DH_BUFFER = new Uint8Array([1, 2, 3]); const EXAMPLE_SENDER_ID = '1234567890'; const EXAMPLE_SUBSCRIPTION = { endpoint: 'http://example-subscription-test.com/1234567890', @@ -49,9 +48,8 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { return dbTMHelper.deleteDB(); }); - afterEach(function() { - stubs.forEach((stub) => { + stubs.forEach(stub => { stub.restore(); }); stubs = []; @@ -90,27 +88,28 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { return badInputs.reduce((promiseChain, badInput) => { return promiseChain.then(() => { - return globalTokenManager.createToken(badInput[0], badInput[1]) - .then(() => { - throw new Error('Bad input should have thrown'); - }, err => { - switch (err.code) { - case 'messaging/' + Errors.codes.BAD_SENDER_ID: - case 'messaging/' + Errors.codes.SW_REGISTRATION_EXPECTED: - break; - default: - throw new Error('Unexpected error thrown: ' + err.message); + return globalTokenManager.createToken(badInput[0], badInput[1]).then( + () => { + throw new Error('Bad input 
should have thrown'); + }, + err => { + switch (err.code) { + case 'messaging/' + Errors.codes.BAD_SENDER_ID: + case 'messaging/' + Errors.codes.SW_REGISTRATION_EXPECTED: + break; + default: + throw new Error('Unexpected error thrown: ' + err.message); + } } - }); + ); }); }, Promise.resolve()); }); it('should handle failing getSubscription', function() { - const activatedRegistration = makeFakeSWReg( - 'active', - { state: 'activated'} - ); + const activatedRegistration = makeFakeSWReg('active', { + state: 'activated' + }); Object.defineProperty(activatedRegistration, 'pushManager', { value: { @@ -121,20 +120,22 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { }); globalTokenManager = new TokenManager(); - return globalTokenManager.createToken(EXAMPLE_SENDER_ID, - activatedRegistration) - .then(() => { - throw new Error('This should have rejected'); - }, err => { - expect(err).to.equal('Unknown error.'); - }); + return globalTokenManager + .createToken(EXAMPLE_SENDER_ID, activatedRegistration) + .then( + () => { + throw new Error('This should have rejected'); + }, + err => { + expect(err).to.equal('Unknown error.'); + } + ); }); it('should handle failing subscribe call', function() { - const activatedRegistration = makeFakeSWReg( - 'active', - { state: 'activated'} - ); + const activatedRegistration = makeFakeSWReg('active', { + state: 'activated' + }); Object.defineProperty(activatedRegistration, 'scope', { value: '/example-scope' }); @@ -143,32 +144,35 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { getSubscription: () => { return Promise.resolve(null); }, - subscribe: (options) => { + subscribe: options => { return Promise.reject(new Error('Unknown Error')); } } }); - const subscribeStub = sinon.stub(TokenManager.prototype, 'subscribeToFCM') + const subscribeStub = sinon + .stub(TokenManager.prototype, 'subscribeToFCM') .callsFake(() => Promise.resolve(EXAMPLE_FCM_TOKEN_DETAILS)); stubs.push(subscribeStub); 
globalTokenManager = new TokenManager(); - return globalTokenManager.createToken(EXAMPLE_SENDER_ID, - activatedRegistration) - .then(() => { - throw new Error('Expected createToken to throw.'); - }, err => { - expect(err.message).to.equal('Unknown Error'); - }); + return globalTokenManager + .createToken(EXAMPLE_SENDER_ID, activatedRegistration) + .then( + () => { + throw new Error('Expected createToken to throw.'); + }, + err => { + expect(err.message).to.equal('Unknown Error'); + } + ); }); it('should use an existing subscription', function() { let currentSubscription = null; - const activatedRegistration = makeFakeSWReg( - 'active', - { state: 'activated'} - ); + const activatedRegistration = makeFakeSWReg('active', { + state: 'activated' + }); Object.defineProperty(activatedRegistration, 'scope', { value: '/example-scope' }); @@ -177,47 +181,50 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { getSubscription: () => { return Promise.resolve(currentSubscription); }, - subscribe: (options) => { + subscribe: options => { currentSubscription = EXAMPLE_SUBSCRIPTION; return Promise.resolve(currentSubscription); } } }); - const subscribeStub = sinon.stub(TokenManager.prototype, 'subscribeToFCM') + const subscribeStub = sinon + .stub(TokenManager.prototype, 'subscribeToFCM') .callsFake(() => Promise.resolve(EXAMPLE_FCM_TOKEN_DETAILS)); stubs.push(subscribeStub); globalTokenManager = new TokenManager(); - return globalTokenManager.createToken(EXAMPLE_SENDER_ID, - activatedRegistration) - .then(token => { - expect(token).to.equal(EXAMPLE_FCM_TOKEN_DETAILS.token); - }) - .then(() => { - // Ensure details are saved correctly - return dbTMHelper.getTokenDetailsFromDB(); - }) - .then(allDetails => { - expect(allDetails.length).to.equal(1); - - const details = allDetails[0]; - - expect(details.fcmPushSet).to.equal(EXAMPLE_FCM_TOKEN_DETAILS.pushSet); - expect(details.fcmToken).to.equal(EXAMPLE_FCM_TOKEN_DETAILS.token); - 
expect(details.fcmSenderId).to.equal(EXAMPLE_SENDER_ID); - expect(details.endpoint).to.equal(EXAMPLE_SUBSCRIPTION.endpoint); - expect(details.swScope).to.equal(activatedRegistration.scope); - expect(details.auth).to.equal(arrayBufferToBase64(AUTH_BUFFER)); - expect(details.p256dh).to.equal(arrayBufferToBase64(P256DH_BUFFER)); - }) - .then(() => { - return globalTokenManager.getSavedToken(EXAMPLE_SENDER_ID, - activatedRegistration); - }) - .then(token => { - expect(token).to.equal(EXAMPLE_FCM_TOKEN_DETAILS.token); - }); + return globalTokenManager + .createToken(EXAMPLE_SENDER_ID, activatedRegistration) + .then(token => { + expect(token).to.equal(EXAMPLE_FCM_TOKEN_DETAILS.token); + }) + .then(() => { + // Ensure details are saved correctly + return dbTMHelper.getTokenDetailsFromDB(); + }) + .then(allDetails => { + expect(allDetails.length).to.equal(1); + + const details = allDetails[0]; + + expect(details.fcmPushSet).to.equal(EXAMPLE_FCM_TOKEN_DETAILS.pushSet); + expect(details.fcmToken).to.equal(EXAMPLE_FCM_TOKEN_DETAILS.token); + expect(details.fcmSenderId).to.equal(EXAMPLE_SENDER_ID); + expect(details.endpoint).to.equal(EXAMPLE_SUBSCRIPTION.endpoint); + expect(details.swScope).to.equal(activatedRegistration.scope); + expect(details.auth).to.equal(arrayBufferToBase64(AUTH_BUFFER)); + expect(details.p256dh).to.equal(arrayBufferToBase64(P256DH_BUFFER)); + }) + .then(() => { + return globalTokenManager.getSavedToken( + EXAMPLE_SENDER_ID, + activatedRegistration + ); + }) + .then(token => { + expect(token).to.equal(EXAMPLE_FCM_TOKEN_DETAILS.token); + }); }); it('should handle valid flow', function() { @@ -226,10 +233,9 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { // state to dispatching an activate statechange event. 
// Test already activated - const activatedRegistration = makeFakeSWReg( - 'active', - { state: 'activated'} - ); + const activatedRegistration = makeFakeSWReg('active', { + state: 'activated' + }); Object.defineProperty(activatedRegistration, 'scope', { value: '/example-scope-1' }); @@ -251,10 +257,7 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { // NOOP } }; - const installToActivateReg = makeFakeSWReg( - 'installing', - swValue - ); + const installToActivateReg = makeFakeSWReg('installing', swValue); Object.defineProperty(installToActivateReg, 'scope', { value: '/example-scope-2' }); @@ -267,15 +270,22 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { }); stubs.push( - sinon.stub(TokenManager.prototype, 'subscribeToFCM') - .onCall(0).returns(Promise.resolve({ - token: EXAMPLE_FCM_TOKEN_DETAILS.token + '1', - pushSet: EXAMPLE_FCM_TOKEN_DETAILS.pushSet + '1' - })) - .onCall(1).returns(Promise.resolve({ - token: EXAMPLE_FCM_TOKEN_DETAILS.token + '2', - pushSet: EXAMPLE_FCM_TOKEN_DETAILS.pushSet + '2' - })) + sinon + .stub(TokenManager.prototype, 'subscribeToFCM') + .onCall(0) + .returns( + Promise.resolve({ + token: EXAMPLE_FCM_TOKEN_DETAILS.token + '1', + pushSet: EXAMPLE_FCM_TOKEN_DETAILS.pushSet + '1' + }) + ) + .onCall(1) + .returns( + Promise.resolve({ + token: EXAMPLE_FCM_TOKEN_DETAILS.token + '2', + pushSet: EXAMPLE_FCM_TOKEN_DETAILS.pushSet + '2' + }) + ) ); const validCombos = [ @@ -297,41 +307,43 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { return validCombos.reduce((promiseChain, validCombo) => { return promiseChain.then(() => { - return globalTokenManager.createToken(validCombo.senderId, - validCombo.swReg) - .then(token => { - expect(token).to.equal(validCombo.expectedToken); - }) - .then(() => { - // Ensure details are saved correctly - return globalTokenManager.getTokenDetailsFromToken( - validCombo.expectedToken); - }) - .then(details => { - 
expect(validCombo.expectedToken).to.equal(details.fcmToken); - expect(validCombo.expectedPushSet).to.equal(details.fcmPushSet); - expect(validCombo.senderId).to.equal(details.fcmSenderId); - expect(EXAMPLE_SUBSCRIPTION.endpoint).to.equal(details.endpoint); - expect(validCombo.swReg.scope).to.equal(details.swScope); - expect(arrayBufferToBase64(AUTH_BUFFER)).to.equal(details.auth); - expect(arrayBufferToBase64(P256DH_BUFFER)).to.equal(details.p256dh); - }) - .then(() => { - return globalTokenManager.getSavedToken(validCombo.senderId, - validCombo.swReg); - }) - .then(token => { - expect(validCombo.expectedToken).to.equal(token); - }); + return globalTokenManager + .createToken(validCombo.senderId, validCombo.swReg) + .then(token => { + expect(token).to.equal(validCombo.expectedToken); + }) + .then(() => { + // Ensure details are saved correctly + return globalTokenManager.getTokenDetailsFromToken( + validCombo.expectedToken + ); + }) + .then(details => { + expect(validCombo.expectedToken).to.equal(details.fcmToken); + expect(validCombo.expectedPushSet).to.equal(details.fcmPushSet); + expect(validCombo.senderId).to.equal(details.fcmSenderId); + expect(EXAMPLE_SUBSCRIPTION.endpoint).to.equal(details.endpoint); + expect(validCombo.swReg.scope).to.equal(details.swScope); + expect(arrayBufferToBase64(AUTH_BUFFER)).to.equal(details.auth); + expect(arrayBufferToBase64(P256DH_BUFFER)).to.equal(details.p256dh); + }) + .then(() => { + return globalTokenManager.getSavedToken( + validCombo.senderId, + validCombo.swReg + ); + }) + .then(token => { + expect(validCombo.expectedToken).to.equal(token); + }); }); }, Promise.resolve()); }); it('should handle sender ID difference', function() { - const activatedRegistration = makeFakeSWReg( - 'active', - { state: 'activated' } - ); + const activatedRegistration = makeFakeSWReg('active', { + state: 'activated' + }); Object.defineProperty(activatedRegistration, 'scope', { value: '/example-scope' }); @@ -340,7 +352,7 @@ describe('Firebase 
Messaging > tokenManager.createToken()', function() { getSubscription: () => { return Promise.resolve(null); }, - subscribe: (options) => { + subscribe: options => { return Promise.resolve(EXAMPLE_SUBSCRIPTION); } } @@ -353,22 +365,28 @@ describe('Firebase Messaging > tokenManager.createToken()', function() { const secondSenderId = EXAMPLE_SENDER_ID + '2'; const methodStub = sinon.stub(TokenManager.prototype, 'subscribeToFCM'); - methodStub.withArgs(EXAMPLE_SENDER_ID, EXAMPLE_SUBSCRIPTION).returns(Promise.resolve(EXAMPLE_FCM_TOKEN_DETAILS)); - methodStub.withArgs(secondSenderId, EXAMPLE_SUBSCRIPTION).returns(Promise.resolve(secondDetails)) + methodStub + .withArgs(EXAMPLE_SENDER_ID, EXAMPLE_SUBSCRIPTION) + .returns(Promise.resolve(EXAMPLE_FCM_TOKEN_DETAILS)); + methodStub + .withArgs(secondSenderId, EXAMPLE_SUBSCRIPTION) + .returns(Promise.resolve(secondDetails)); stubs.push(methodStub); globalTokenManager = new TokenManager(); - return globalTokenManager.createToken(EXAMPLE_SENDER_ID, - activatedRegistration) - .then(token => { - expect(EXAMPLE_FCM_TOKEN_DETAILS.token).to.equal(token); - }) - .then(() => { - return globalTokenManager.createToken(secondSenderId, - activatedRegistration); - }) - .then(token => { - expect(secondDetails.token).to.equal(token); - }); + return globalTokenManager + .createToken(EXAMPLE_SENDER_ID, activatedRegistration) + .then(token => { + expect(EXAMPLE_FCM_TOKEN_DETAILS.token).to.equal(token); + }) + .then(() => { + return globalTokenManager.createToken( + secondSenderId, + activatedRegistration + ); + }) + .then(token => { + expect(secondDetails.token).to.equal(token); + }); }); }); diff --git a/tests/messaging/browser/token-manager-delete-token_test.test.ts b/tests/messaging/browser/token-manager-delete-token_test.test.ts index 75590096a34..56a49c87d0a 100644 --- a/tests/messaging/browser/token-manager-delete-token_test.test.ts +++ b/tests/messaging/browser/token-manager-delete-token_test.test.ts @@ -13,7 +13,7 @@ * See the License 
for the specific language governing permissions and * limitations under the License. */ -import { assert } from "chai"; +import { assert } from 'chai'; import dbTMHelper from './db-token-manager'; import TokenManager from '../../../src/messaging/models/token-manager'; import Errors from '../../../src/messaging/models/errors'; @@ -27,9 +27,8 @@ describe('Firebase Messaging > tokenManager.deleteToken()', function() { return dbTMHelper.deleteDB(); }); - afterEach(function() { - stubs.forEach((stub) => { + stubs.forEach(stub => { stub.restore(); }); stubs = []; @@ -42,24 +41,32 @@ describe('Firebase Messaging > tokenManager.deleteToken()', function() { it('should handle nothing', function() { globalTokenManager = new TokenManager(); - return globalTokenManager.deleteToken() - .then(() => { - throw new Error('Expected this to throw an error due to no token'); - }, err => { - assert.equal('messaging/' + Errors.codes.INVALID_DELETE_TOKEN, - err.code); - }); + return globalTokenManager.deleteToken().then( + () => { + throw new Error('Expected this to throw an error due to no token'); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.INVALID_DELETE_TOKEN, + err.code + ); + } + ); }); it('should handle empty string', function() { globalTokenManager = new TokenManager(); - return globalTokenManager.deleteToken('') - .then(() => { - throw new Error('Expected this to throw an error due to no token'); - }, err => { - assert.equal('messaging/' + Errors.codes.INVALID_DELETE_TOKEN, - err.code); - }); + return globalTokenManager.deleteToken('').then( + () => { + throw new Error('Expected this to throw an error due to no token'); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.INVALID_DELETE_TOKEN, + err.code + ); + } + ); }); it('should delete current token', function() { @@ -71,18 +78,18 @@ describe('Firebase Messaging > tokenManager.deleteToken()', function() { dbTMHelper.addObjectToIndexDB(exampleDetails); globalTokenManager = new TokenManager(); - 
return globalTokenManager.deleteToken(exampleDetails.fcmToken) - .then(deletedDetails => { - assert.equal(exampleDetails.swScope, deletedDetails.swScope); - assert.equal(exampleDetails.fcmToken, deletedDetails.fcmToken); - assert.equal(exampleDetails.fcmSenderId, deletedDetails.fcmSenderId); - - return dbTMHelper.getTokenDetailsFromDB(); + return globalTokenManager + .deleteToken(exampleDetails.fcmToken) + .then(deletedDetails => { + assert.equal(exampleDetails.swScope, deletedDetails.swScope); + assert.equal(exampleDetails.fcmToken, deletedDetails.fcmToken); + assert.equal(exampleDetails.fcmSenderId, deletedDetails.fcmSenderId); - }) - .then(tokenDetails => { - assert.equal(0, tokenDetails.length); - }); + return dbTMHelper.getTokenDetailsFromDB(); + }) + .then(tokenDetails => { + assert.equal(0, tokenDetails.length); + }); }); it('should delete non existant token', function() { @@ -94,22 +101,27 @@ describe('Firebase Messaging > tokenManager.deleteToken()', function() { dbTMHelper.addObjectToIndexDB(exampleDetails); globalTokenManager = new TokenManager(); - return globalTokenManager.deleteToken('bad-token') - .then(() => { - throw new Error('Expected this delete to throw and error.'); - }, err => { - assert.equal('messaging/' + Errors.codes.DELETE_TOKEN_NOT_FOUND, - err.code); - }) - .then(() => { - return dbTMHelper.getTokenDetailsFromDB(); - }) - .then(tokenDetails => { - assert.equal(1, tokenDetails.length); - assert.equal(exampleDetails.swScope, tokenDetails[0].swScope); - assert.equal(exampleDetails.fcmToken, tokenDetails[0].fcmToken); - assert.equal(exampleDetails.fcmSenderId, tokenDetails[0].fcmSenderId); - }); + return globalTokenManager + .deleteToken('bad-token') + .then( + () => { + throw new Error('Expected this delete to throw and error.'); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.DELETE_TOKEN_NOT_FOUND, + err.code + ); + } + ) + .then(() => { + return dbTMHelper.getTokenDetailsFromDB(); + }) + .then(tokenDetails => { + 
assert.equal(1, tokenDetails.length); + assert.equal(exampleDetails.swScope, tokenDetails[0].swScope); + assert.equal(exampleDetails.fcmToken, tokenDetails[0].fcmToken); + assert.equal(exampleDetails.fcmSenderId, tokenDetails[0].fcmSenderId); + }); }); - }); diff --git a/tests/messaging/browser/token-manager-get-saved-token.test.ts b/tests/messaging/browser/token-manager-get-saved-token.test.ts index d69eb82f7b9..0fb75499686 100644 --- a/tests/messaging/browser/token-manager-get-saved-token.test.ts +++ b/tests/messaging/browser/token-manager-get-saved-token.test.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { assert } from "chai"; +import { assert } from 'chai'; import * as sinon from 'sinon'; import dbTMHelper from './db-token-manager'; import TokenManager from '../../../src/messaging/models/token-manager'; @@ -40,43 +40,38 @@ describe('Firebase Messaging > tokenManager.getSavedToken()', function() { const FakeRegistration = function() {}; FakeRegistration.prototype = ServiceWorkerRegistration.prototype; - const badInputs = [ - '', - [], - {}, - true, - null - ]; + const badInputs = ['', [], {}, true, null]; const promises = badInputs.map(badInput => { globalTokenManager = new TokenManager(); - return globalTokenManager.getSavedToken(badInput, new FakeRegistration()) - .then(() => { - throw new Error('Expected getSavedToken to reject the promise.'); - }, err => { - assert.equal('messaging/' + Errors.codes.BAD_SENDER_ID, - err.code); - }); + return globalTokenManager + .getSavedToken(badInput, new FakeRegistration()) + .then( + () => { + throw new Error('Expected getSavedToken to reject the promise.'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_SENDER_ID, err.code); + } + ); }); return Promise.all(promises); }); it('should handle bad registration input', function() { - const badInputs = [ - 'invalid', - [], - {}, - true, - null - ]; + const badInputs = 
['invalid', [], {}, true, null]; const promises = badInputs.map(badInput => { globalTokenManager = new TokenManager(); - return globalTokenManager.getSavedToken('1234567890', badInput) - .then(() => { - throw new Error('Expected getSavedToken to reject the promise.'); - }, err => { - assert.equal('messaging/' + Errors.codes.SW_REGISTRATION_EXPECTED, - err.code); - }); + return globalTokenManager.getSavedToken('1234567890', badInput).then( + () => { + throw new Error('Expected getSavedToken to reject the promise.'); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.SW_REGISTRATION_EXPECTED, + err.code + ); + } + ); }); return Promise.all(promises); }); @@ -89,29 +84,30 @@ describe('Firebase Messaging > tokenManager.getSavedToken()', function() { const registration = new FakeRegistration(); Object.defineProperty(registration, 'scope', { - value: swScope + value: swScope }); globalTokenManager = new TokenManager(); - return globalTokenManager.getSavedToken('1234567890', registration) - .then(token => { - assert.equal(undefined, token); - }); + return globalTokenManager + .getSavedToken('1234567890', registration) + .then(token => { + assert.equal(undefined, token); + }); }); it('should handle sender ID mismatch', function() { const originalSenderID = '1234567890'; const sencondSenderID = '0987654321'; - const auth = new Uint8Array([1,2,3]); - const p256dh = new Uint8Array([4,5,6]); + const auth = new Uint8Array([1, 2, 3]); + const p256dh = new Uint8Array([4, 5, 6]); const details = { - 'swScope': 'sw-scope', - 'endpoint': 'http://example.google.com/', - 'auth': arrayBufferToBase64(auth), - 'p256dh': arrayBufferToBase64(p256dh), - 'fcmToken': 'example-token', - 'fcmPushSet': 'example-push-set', - 'fcmSenderId': originalSenderID + swScope: 'sw-scope', + endpoint: 'http://example.google.com/', + auth: arrayBufferToBase64(auth), + p256dh: arrayBufferToBase64(p256dh), + fcmToken: 'example-token', + fcmPushSet: 'example-push-set', + fcmSenderId: originalSenderID 
}; dbTMHelper.addObjectToIndexDB(details); @@ -121,38 +117,39 @@ describe('Firebase Messaging > tokenManager.getSavedToken()', function() { const registration = new FakeRegistration(); Object.defineProperty(registration, 'scope', { - value: details.swScope + value: details.swScope }); // First test with no subscription Object.defineProperty(registration, 'pushManager', { - value: { - getSubscription: () => { - return Promise.resolve({ - endpoint: details.endpoint, - getKey: keyName => { - if (keyName === 'auth') { - return auth; - } else { - return p256dh; - } + value: { + getSubscription: () => { + return Promise.resolve({ + endpoint: details.endpoint, + getKey: keyName => { + if (keyName === 'auth') { + return auth; + } else { + return p256dh; } - }); - } + } + }); } + } }); globalTokenManager = new TokenManager(); - return globalTokenManager.getSavedToken(originalSenderID, registration) - .then(token => { - assert.equal(details.fcmToken, token); - }) - .then(() => { - return globalTokenManager.getSavedToken(sencondSenderID, registration); - }) - .then(token => { - assert.equal(undefined, token); - }); + return globalTokenManager + .getSavedToken(originalSenderID, registration) + .then(token => { + assert.equal(details.fcmToken, token); + }) + .then(() => { + return globalTokenManager.getSavedToken(sencondSenderID, registration); + }) + .then(token => { + assert.equal(undefined, token); + }); }); it('should handle subscription', function() { @@ -171,52 +168,57 @@ describe('Firebase Messaging > tokenManager.getSavedToken()', function() { let registration = new FakeRegistration(); Object.defineProperty(registration, 'scope', { - value: swScope + value: swScope }); // First test with no subscription Object.defineProperty(registration, 'pushManager', { - value: { - getSubscription: () => { - return Promise.resolve(null); - } + value: { + getSubscription: () => { + return Promise.resolve(null); } + } }); - const authBuffer = new Uint8Array([1,2,3]); - const 
p256dhBuffer = new Uint8Array([4,5,6]); + const authBuffer = new Uint8Array([1, 2, 3]); + const p256dhBuffer = new Uint8Array([4, 5, 6]); globalTokenManager = new TokenManager(); - return globalTokenManager.getSavedToken(senderId, registration) - .then(token => { - assert.equal(undefined, token); + return globalTokenManager + .getSavedToken(senderId, registration) + .then(token => { + assert.equal(undefined, token); - registration = new FakeRegistration(); - Object.defineProperty(registration, 'scope', { + registration = new FakeRegistration(); + Object.defineProperty(registration, 'scope', { value: swScope - }); - Object.defineProperty(registration, 'pushManager', { + }); + Object.defineProperty(registration, 'pushManager', { value: { getSubscription: () => { return Promise.reject(new Error('Unknown service worker error')); } } - }); - - return globalTokenManager.getSavedToken(senderId, registration) + }); + + return globalTokenManager.getSavedToken(senderId, registration).then( + () => { + throw new Error('Expected this to reject the promise'); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.GET_SUBSCRIPTION_FAILED, + err.code + ); + } + ); + }) .then(() => { - throw new Error('Expected this to reject the promise'); - }, err => { - assert.equal('messaging/' + Errors.codes.GET_SUBSCRIPTION_FAILED, - err.code); - }); - }) - .then(() => { - // Second test with mis-match subscription - registration = new FakeRegistration(); - Object.defineProperty(registration, 'scope', { + // Second test with mis-match subscription + registration = new FakeRegistration(); + Object.defineProperty(registration, 'scope', { value: swScope - }); - Object.defineProperty(registration, 'pushManager', { + }); + Object.defineProperty(registration, 'pushManager', { value: { getSubscription: () => { return Promise.resolve({ @@ -231,67 +233,67 @@ describe('Firebase Messaging > tokenManager.getSavedToken()', function() { }); } } + }); + + dbTMHelper.updateObjectInIndexDb({ + 
fcmToken: 'current-token', + fcmPushSet: 'example-push-set', + fcmSenderId: senderId, + endpoint: 'https://fcm.google.com/wrong-fake-endpoint', + auth: arrayBufferToBase64(authBuffer), + p256dh: arrayBufferToBase64(p256dhBuffer), + swScope + }); + + return globalTokenManager.getSavedToken(senderId, registration); + }) + .then(token => { + assert.equal(undefined, token); + + dbTMHelper.updateObjectInIndexDb({ + fcmToken: 'current-token', + fcmPushSet: 'example-push-set', + fcmSenderId: senderId, + endpoint: 'https://fcm.google.com/fake-endpoint', + auth: arrayBufferToBase64(new Uint8Array([9, 8, 7])), + p256dh: arrayBufferToBase64(p256dhBuffer), + swScope + }); + + return globalTokenManager.getSavedToken(senderId, registration); + }) + .then(token => { + assert.equal(undefined, token); + + dbTMHelper.updateObjectInIndexDb({ + fcmToken: 'current-token', + fcmPushSet: 'example-push-set', + fcmSenderId: senderId, + endpoint: 'https://fcm.google.com/fake-endpoint', + auth: arrayBufferToBase64(authBuffer), + p256dh: arrayBufferToBase64(new Uint8Array([9, 8, 7])), + swScope + }); + + return globalTokenManager.getSavedToken(senderId, registration); + }) + .then(token => { + assert.equal(undefined, token); + + dbTMHelper.updateObjectInIndexDb({ + fcmToken: 'current-token', + fcmPushSet: 'example-push-set', + fcmSenderId: senderId, + endpoint: 'https://fcm.google.com/fake-endpoint', + auth: arrayBufferToBase64(authBuffer), + p256dh: arrayBufferToBase64(p256dhBuffer), + swScope + }); + + return globalTokenManager.getSavedToken(senderId, registration); + }) + .then(token => { + assert.equal('current-token', token); }); - - dbTMHelper.updateObjectInIndexDb({ - fcmToken: 'current-token', - fcmPushSet: 'example-push-set', - fcmSenderId: senderId, - endpoint: 'https://fcm.google.com/wrong-fake-endpoint', - auth: arrayBufferToBase64(authBuffer), - p256dh: arrayBufferToBase64(p256dhBuffer), - swScope - }); - - return globalTokenManager.getSavedToken(senderId, registration); - }) - 
.then(token => { - assert.equal(undefined, token); - - dbTMHelper.updateObjectInIndexDb({ - fcmToken: 'current-token', - fcmPushSet: 'example-push-set', - fcmSenderId: senderId, - endpoint: 'https://fcm.google.com/fake-endpoint', - auth: arrayBufferToBase64(new Uint8Array([9,8,7])), - p256dh: arrayBufferToBase64(p256dhBuffer), - swScope - }); - - return globalTokenManager.getSavedToken(senderId, registration); - }) - .then(token => { - assert.equal(undefined, token); - - dbTMHelper.updateObjectInIndexDb({ - fcmToken: 'current-token', - fcmPushSet: 'example-push-set', - fcmSenderId: senderId, - endpoint: 'https://fcm.google.com/fake-endpoint', - auth: arrayBufferToBase64(authBuffer), - p256dh: arrayBufferToBase64(new Uint8Array([9,8,7])), - swScope - }); - - return globalTokenManager.getSavedToken(senderId, registration); - }) - .then(token => { - assert.equal(undefined, token); - - dbTMHelper.updateObjectInIndexDb({ - fcmToken: 'current-token', - fcmPushSet: 'example-push-set', - fcmSenderId: senderId, - endpoint: 'https://fcm.google.com/fake-endpoint', - auth: arrayBufferToBase64(authBuffer), - p256dh: arrayBufferToBase64(p256dhBuffer), - swScope - }); - - return globalTokenManager.getSavedToken(senderId, registration); - }) - .then(token => { - assert.equal('current-token', token); - }); }); }); diff --git a/tests/messaging/browser/vapid-details-model-delete.test.ts b/tests/messaging/browser/vapid-details-model-delete.test.ts index 3b7378ca834..730b424e155 100644 --- a/tests/messaging/browser/vapid-details-model-delete.test.ts +++ b/tests/messaging/browser/vapid-details-model-delete.test.ts @@ -13,14 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { assert } from "chai"; +import { assert } from 'chai'; import dbHelpers from './db-helper'; import Errors from '../../../src/messaging/models/errors'; import VapidDetailsModel from '../../../src/messaging/models/vapid-details-model'; describe('Firebase Messaging > VapidDetailsModel.deleteToken()', function() { const EXAMPLE_SCOPE = '/example-scope'; - const EXAMPLE_VAPID_STRING = 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + + const EXAMPLE_VAPID_STRING = + 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + '4emEWVURkCF8fUTHEFe2xrEgTt5ilh5xD94v0pFe_I'; let vapidModel; @@ -42,50 +43,48 @@ describe('Firebase Messaging > VapidDetailsModel.deleteToken()', function() { }); it('should throw on bad scope input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; - badInputs.forEach((badInput) => { + const badInputs = ['', [], {}, true, null, 123]; + badInputs.forEach(badInput => { vapidModel = new VapidDetailsModel(); - return vapidModel.saveVapidDetails(badInput, EXAMPLE_VAPID_STRING) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_SCOPE, - err.code); - }); + return vapidModel.saveVapidDetails(badInput, EXAMPLE_VAPID_STRING).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_SCOPE, err.code); + } + ); }); }); it('should delete non existant details', function() { vapidModel = new VapidDetailsModel(); - return vapidModel.deleteVapidDetails(EXAMPLE_SCOPE) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.DELETE_SCOPE_NOT_FOUND, - err.code); - }); + return vapidModel.deleteVapidDetails(EXAMPLE_SCOPE).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal( + 'messaging/' + Errors.codes.DELETE_SCOPE_NOT_FOUND, + err.code + ); + } + ); }); it('should save and delete 
details', function() { vapidModel = new VapidDetailsModel(); - return vapidModel.saveVapidDetails(EXAMPLE_SCOPE, EXAMPLE_VAPID_STRING) - .then(() => { - return vapidModel.deleteVapidDetails(EXAMPLE_SCOPE); - }) - .then((vapidKey) => { - assert.equal(vapidKey, EXAMPLE_VAPID_STRING); - return vapidModel.getVapidFromSWScope(EXAMPLE_SCOPE); - }) - .then((vapid) => { - assert.equal(vapid, null); - }); + return vapidModel + .saveVapidDetails(EXAMPLE_SCOPE, EXAMPLE_VAPID_STRING) + .then(() => { + return vapidModel.deleteVapidDetails(EXAMPLE_SCOPE); + }) + .then(vapidKey => { + assert.equal(vapidKey, EXAMPLE_VAPID_STRING); + return vapidModel.getVapidFromSWScope(EXAMPLE_SCOPE); + }) + .then(vapid => { + assert.equal(vapid, null); + }); }); - -}); \ No newline at end of file +}); diff --git a/tests/messaging/browser/vapid-details-model-get.test.ts b/tests/messaging/browser/vapid-details-model-get.test.ts index 2de44c47b5a..2dc2fb83f3f 100644 --- a/tests/messaging/browser/vapid-details-model-get.test.ts +++ b/tests/messaging/browser/vapid-details-model-get.test.ts @@ -13,14 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { assert } from "chai"; +import { assert } from 'chai'; import dbHelpers from './db-helper'; import Errors from '../../../src/messaging/models/errors'; import VapidDetailsModel from '../../../src/messaging/models/vapid-details-model'; describe('Firebase Messaging > VapidDetailsModel.getVapidFromSWScope()', function() { const EXAMPLE_SCOPE = '/example-scope'; - const EXAMPLE_VAPID_STRING = 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + + const EXAMPLE_VAPID_STRING = + 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + '4emEWVURkCF8fUTHEFe2xrEgTt5ilh5xD94v0pFe_I'; let vapidModel; @@ -42,40 +43,34 @@ describe('Firebase Messaging > VapidDetailsModel.getVapidFromSWScope()', functio }); it('should throw on bad scope input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; - badInputs.forEach((badInput) => { + const badInputs = ['', [], {}, true, null, 123]; + badInputs.forEach(badInput => { vapidModel = new VapidDetailsModel(); - return vapidModel.getVapidFromSWScope(badInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_SCOPE, - err.code); - }); + return vapidModel.getVapidFromSWScope(badInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_SCOPE, err.code); + } + ); }); }); - + it('should get vapid key', function() { vapidModel = new VapidDetailsModel(); - return vapidModel.getVapidFromSWScope(EXAMPLE_SCOPE) - .then((vapidKey) => { - assert.equal(null, vapidKey); + return vapidModel + .getVapidFromSWScope(EXAMPLE_SCOPE) + .then(vapidKey => { + assert.equal(null, vapidKey); - return vapidModel.saveVapidDetails(EXAMPLE_SCOPE, EXAMPLE_VAPID_STRING); - }) - .then(() => { - return vapidModel.getVapidFromSWScope(EXAMPLE_SCOPE); - }) - .then((vapidKey) => { - assert.equal(EXAMPLE_VAPID_STRING, vapidKey); - }); + return vapidModel.saveVapidDetails(EXAMPLE_SCOPE, 
EXAMPLE_VAPID_STRING); + }) + .then(() => { + return vapidModel.getVapidFromSWScope(EXAMPLE_SCOPE); + }) + .then(vapidKey => { + assert.equal(EXAMPLE_VAPID_STRING, vapidKey); + }); }); - }); diff --git a/tests/messaging/browser/vapid-details-model-save.test.ts b/tests/messaging/browser/vapid-details-model-save.test.ts index 3ad3ca14c67..b8657f7b041 100644 --- a/tests/messaging/browser/vapid-details-model-save.test.ts +++ b/tests/messaging/browser/vapid-details-model-save.test.ts @@ -13,16 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { assert } from "chai"; +import { assert } from 'chai'; import dbHelpers from './db-helper'; import Errors from '../../../src/messaging/models/errors'; import VapidDetailsModel from '../../../src/messaging/models/vapid-details-model'; describe('Firebase Messaging > VapidDetailsModel.saveVapidDetails()', function() { const EXAMPLE_SCOPE = '/example-scope'; - const EXAMPLE_VAPID_STRING = 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + + const EXAMPLE_VAPID_STRING = + 'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' + '4emEWVURkCF8fUTHEFe2xrEgTt5ilh5xD94v0pFe_I'; - + let vapidModel; beforeEach(function() { @@ -42,44 +43,32 @@ describe('Firebase Messaging > VapidDetailsModel.saveVapidDetails()', function() }); it('should throw on bad scope input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; - badInputs.forEach((badInput) => { + const badInputs = ['', [], {}, true, null, 123]; + badInputs.forEach(badInput => { vapidModel = new VapidDetailsModel(); - return vapidModel.saveVapidDetails(badInput, EXAMPLE_VAPID_STRING) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_SCOPE, - err.code); - }); + return vapidModel.saveVapidDetails(badInput, EXAMPLE_VAPID_STRING).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + 
assert.equal('messaging/' + Errors.codes.BAD_SCOPE, err.code); + } + ); }); }); it('should throw on bad vapid input', function() { - const badInputs = [ - '', - [], - {}, - true, - null, - 123 - ]; - badInputs.forEach((badInput) => { + const badInputs = ['', [], {}, true, null, 123]; + badInputs.forEach(badInput => { vapidModel = new VapidDetailsModel(); - return vapidModel.saveVapidDetails(EXAMPLE_SCOPE, badInput) - .then(() => { - throw new Error('Expected promise to reject'); - }, (err) => { - assert.equal('messaging/' + Errors.codes.BAD_VAPID_KEY, - err.code); - }); + return vapidModel.saveVapidDetails(EXAMPLE_SCOPE, badInput).then( + () => { + throw new Error('Expected promise to reject'); + }, + err => { + assert.equal('messaging/' + Errors.codes.BAD_VAPID_KEY, err.code); + } + ); }); }); diff --git a/tests/package/binary/browser/binary_namespace.test.ts b/tests/package/binary/browser/binary_namespace.test.ts index 216742a3e57..22b1316deb0 100644 --- a/tests/package/binary/browser/binary_namespace.test.ts +++ b/tests/package/binary/browser/binary_namespace.test.ts @@ -16,23 +16,23 @@ declare var firebase: FirebaseNamespace; -import { appInstanceSpec } from "../../utils/definitions/app"; -import { assert } from "chai"; -import { checkProps } from "../../utils/validator"; -import { FirebaseNamespace } from "../../../../src/app/firebase_app"; -import { firebaseSpec } from "../../utils/definitions/firebase"; -import { storageInstanceSpec } from "../../utils/definitions/storage"; -import { authInstanceSpec } from "../../utils/definitions/auth"; -import { messagingInstanceSpec } from "../../utils/definitions/messaging"; -import { databaseInstanceSpec } from "../../utils/definitions/database"; +import { appInstanceSpec } from '../../utils/definitions/app'; +import { assert } from 'chai'; +import { checkProps } from '../../utils/validator'; +import { FirebaseNamespace } from '../../../../src/app/firebase_app'; +import { firebaseSpec } from 
'../../utils/definitions/firebase'; +import { storageInstanceSpec } from '../../utils/definitions/storage'; +import { authInstanceSpec } from '../../utils/definitions/auth'; +import { messagingInstanceSpec } from '../../utils/definitions/messaging'; +import { databaseInstanceSpec } from '../../utils/definitions/database'; const appConfig = { - apiKey: "test-api-key", - authDomain: "test-project-name.firebaseapp.com", - databaseURL: "https://test-project-name.firebaseio.com", - projectId: "test-project-name", - storageBucket: "test-project-name.appspot.com", - messagingSenderId: "012345678910" + apiKey: 'test-api-key', + authDomain: 'test-project-name.firebaseapp.com', + databaseURL: 'https://test-project-name.firebaseio.com', + projectId: 'test-project-name', + storageBucket: 'test-project-name.appspot.com', + messagingSenderId: '012345678910' }; describe('Binary Namespace Test', () => { @@ -56,17 +56,29 @@ describe('Binary Namespace Test', () => { }); describe('firebase.database() Verification', () => { it('firebase.database() should expose proper namespace', () => { - checkProps('firebase.database()', (firebase as any).database(), databaseInstanceSpec); + checkProps( + 'firebase.database()', + (firebase as any).database(), + databaseInstanceSpec + ); }); }); describe('firebase.storage() Verification', () => { it('firebase.storage() should expose proper namespace', () => { - checkProps('firebase.storage()', (firebase as any).storage(), storageInstanceSpec); + checkProps( + 'firebase.storage()', + (firebase as any).storage(), + storageInstanceSpec + ); }); }); describe('firebase.messaging() Verification', () => { it('firebase.messaging() should expose proper namespace', () => { - checkProps('firebase.messaging()', (firebase as any).messaging(), messagingInstanceSpec); + checkProps( + 'firebase.messaging()', + (firebase as any).messaging(), + messagingInstanceSpec + ); }); }); }); diff --git a/tests/package/binary/node/binary_namespace.test.ts 
b/tests/package/binary/node/binary_namespace.test.ts index 92c5a8fdb7d..d39b5c3384b 100644 --- a/tests/package/binary/node/binary_namespace.test.ts +++ b/tests/package/binary/node/binary_namespace.test.ts @@ -15,23 +15,23 @@ */ const firebase = require('../../../../dist/package/firebase-node'); -import { appInstanceSpec } from "../../utils/definitions/app"; -import { assert } from "chai"; -import { checkProps } from "../../utils/validator"; -import { FirebaseNamespace } from "../../../../src/app/firebase_app"; -import { firebaseSpec } from "../../utils/definitions/firebase"; -import { storageInstanceSpec } from "../../utils/definitions/storage"; -import { authInstanceSpec } from "../../utils/definitions/auth"; -import { compiledMessagingInstanceSpec } from "../../utils/definitions/messaging"; -import { databaseInstanceSpec } from "../../utils/definitions/database"; +import { appInstanceSpec } from '../../utils/definitions/app'; +import { assert } from 'chai'; +import { checkProps } from '../../utils/validator'; +import { FirebaseNamespace } from '../../../../src/app/firebase_app'; +import { firebaseSpec } from '../../utils/definitions/firebase'; +import { storageInstanceSpec } from '../../utils/definitions/storage'; +import { authInstanceSpec } from '../../utils/definitions/auth'; +import { compiledMessagingInstanceSpec } from '../../utils/definitions/messaging'; +import { databaseInstanceSpec } from '../../utils/definitions/database'; const appConfig = { - apiKey: "test-api-key", - authDomain: "test-project-name.firebaseapp.com", - databaseURL: "https://test-project-name.firebaseio.com", - projectId: "test-project-name", - storageBucket: "test-project-name.appspot.com", - messagingSenderId: "012345678910" + apiKey: 'test-api-key', + authDomain: 'test-project-name.firebaseapp.com', + databaseURL: 'https://test-project-name.firebaseio.com', + projectId: 'test-project-name', + storageBucket: 'test-project-name.appspot.com', + messagingSenderId: '012345678910' }; 
describe('Binary Namespace Test', () => { @@ -55,7 +55,11 @@ describe('Binary Namespace Test', () => { }); describe('firebase.database() Verification', () => { it('firebase.database() should expose proper namespace', () => { - checkProps('firebase.database()', (firebase as any).database(), databaseInstanceSpec); + checkProps( + 'firebase.database()', + (firebase as any).database(), + databaseInstanceSpec + ); }); }); }); diff --git a/tests/package/browser/messaging_namespace.test.ts b/tests/package/browser/messaging_namespace.test.ts index 6a2c49f59e7..cd47b5bac1b 100644 --- a/tests/package/browser/messaging_namespace.test.ts +++ b/tests/package/browser/messaging_namespace.test.ts @@ -14,19 +14,19 @@ * limitations under the License. */ -import { createFirebaseNamespace } from "../../../src/app/firebase_app"; -import { messagingInstanceSpec } from "../utils/definitions/messaging"; -import { FirebaseNamespace } from "../../../src/app/firebase_app"; -import { registerMessaging } from "../../../src/messaging"; -import { checkProps } from "../utils/validator"; +import { createFirebaseNamespace } from '../../../src/app/firebase_app'; +import { messagingInstanceSpec } from '../utils/definitions/messaging'; +import { FirebaseNamespace } from '../../../src/app/firebase_app'; +import { registerMessaging } from '../../../src/messaging'; +import { checkProps } from '../utils/validator'; const appConfig = { - apiKey: "test-api-key", - authDomain: "test-project-name.firebaseapp.com", - databaseURL: "https://test-project-name.firebaseio.com", - projectId: "test-project-name", - storageBucket: "test-project-name.appspot.com", - messagingSenderId: "012345678910" + apiKey: 'test-api-key', + authDomain: 'test-project-name.firebaseapp.com', + databaseURL: 'https://test-project-name.firebaseio.com', + projectId: 'test-project-name', + storageBucket: 'test-project-name.appspot.com', + messagingSenderId: '012345678910' }; describe('Namespace Test', () => { @@ -38,7 +38,11 @@ 
describe('Namespace Test', () => { }); describe('firebase.messaging() Verification', () => { it('firebase.messaging() should expose proper namespace', () => { - checkProps('firebase.messaging()', (firebase as any).messaging(), messagingInstanceSpec); + checkProps( + 'firebase.messaging()', + (firebase as any).messaging(), + messagingInstanceSpec + ); }); }); }); diff --git a/tests/package/module_namespace.test.ts b/tests/package/module_namespace.test.ts index 7ff3aa7ab6d..f473fe39943 100644 --- a/tests/package/module_namespace.test.ts +++ b/tests/package/module_namespace.test.ts @@ -14,21 +14,21 @@ * limitations under the License. */ -import { createFirebaseNamespace } from "../../src/app/firebase_app"; -import { appInstanceSpec } from "./utils/definitions/app"; -import { assert } from "chai"; -import { checkProps } from "./utils/validator"; -import { FirebaseNamespace } from "../../src/app/firebase_app"; -import { registerStorage } from "../../src/storage"; -import { storageInstanceSpec } from "./utils/definitions/storage"; +import { createFirebaseNamespace } from '../../src/app/firebase_app'; +import { appInstanceSpec } from './utils/definitions/app'; +import { assert } from 'chai'; +import { checkProps } from './utils/validator'; +import { FirebaseNamespace } from '../../src/app/firebase_app'; +import { registerStorage } from '../../src/storage'; +import { storageInstanceSpec } from './utils/definitions/storage'; const appConfig = { - apiKey: "test-api-key", - authDomain: "test-project-name.firebaseapp.com", - databaseURL: "https://test-project-name.firebaseio.com", - projectId: "test-project-name", - storageBucket: "test-project-name.appspot.com", - messagingSenderId: "012345678910" + apiKey: 'test-api-key', + authDomain: 'test-project-name.firebaseapp.com', + databaseURL: 'https://test-project-name.firebaseio.com', + projectId: 'test-project-name', + storageBucket: 'test-project-name.appspot.com', + messagingSenderId: '012345678910' }; describe('Namespace 
Test', () => { @@ -36,7 +36,7 @@ describe('Namespace Test', () => { beforeEach(() => { firebase = createFirebaseNamespace(); registerStorage(firebase); - firebase.initializeApp(appConfig) + firebase.initializeApp(appConfig); }); describe('firebase Verification', () => { it('Will be tested by integration test suite until TS migration'); @@ -54,7 +54,11 @@ describe('Namespace Test', () => { }); describe('firebase.storage() Verification', () => { it('firebase.storage() should expose proper namespace', () => { - checkProps('firebase.storage()', (firebase as any).storage(), storageInstanceSpec); + checkProps( + 'firebase.storage()', + (firebase as any).storage(), + storageInstanceSpec + ); }); }); }); diff --git a/tests/package/utils/definitions/app.ts b/tests/package/utils/definitions/app.ts index 118749f0a79..b4de02b347e 100644 --- a/tests/package/utils/definitions/app.ts +++ b/tests/package/utils/definitions/app.ts @@ -23,21 +23,21 @@ import { NamespaceSpec } from '../namespace'; // App instance // export const appInstanceSpec: NamespaceSpec = { - name: {is: String}, - options: {is: Object}, - delete: {is: Function}, + name: { is: String }, + options: { is: Object }, + delete: { is: Function }, // Patched methods from Auth service INTERNAL: { - getToken: {is: Function}, - addAuthTokenListener: {is: Function}, - removeAuthTokenListener: {is: Function}, + getToken: { is: Function }, + addAuthTokenListener: { is: Function }, + removeAuthTokenListener: { is: Function } } }; export const firebaseErrorSpec: NamespaceSpec = { - code: {is: String}, - message: {is: String}, - name: {is: String}, - stack: {is: String}, + code: { is: String }, + message: { is: String }, + name: { is: String }, + stack: { is: String } }; diff --git a/tests/package/utils/definitions/auth.ts b/tests/package/utils/definitions/auth.ts index e186bb1327e..20d6eb6a2b8 100644 --- a/tests/package/utils/definitions/auth.ts +++ b/tests/package/utils/definitions/auth.ts @@ -23,23 +23,23 @@ import { 
NamespaceSpec } from '../namespace'; // Auth Service namespace // export const authSpec: NamespaceSpec = { - Auth: {is: Function}, - Error: {is: Function}, - EmailAuthProvider: {is: Function}, - FacebookAuthProvider: {is: Function}, - GithubAuthProvider: {is: Function}, - GoogleAuthProvider: {is: Function}, - TwitterAuthProvider: {is: Function}, + Auth: { is: Function }, + Error: { is: Function }, + EmailAuthProvider: { is: Function }, + FacebookAuthProvider: { is: Function }, + GithubAuthProvider: { is: Function }, + GoogleAuthProvider: { is: Function }, + TwitterAuthProvider: { is: Function } }; // // Auth Service instance // export const authInstanceSpec: NamespaceSpec = { - app: {is: Object, isName: 'App'}, + app: { is: Object, isName: 'App' }, INTERNAL: { - delete: {is: Function} - }, + delete: { is: Function } + } }; // @@ -48,14 +48,14 @@ export const authInstanceSpec: NamespaceSpec = { export const firebaseSpec: NamespaceSpec = { INTERNAL: { factories: { - auth: {is: Function}, - }, + auth: { is: Function } + } }, - User: {is: Function}, + User: { is: Function }, // Service namespaces are also accessor functions - auth: {is: Function, args: 1}, + auth: { is: Function, args: 1 } }; // @@ -63,5 +63,5 @@ export const firebaseSpec: NamespaceSpec = { // export const appInstanceSpec: NamespaceSpec = { // App-specific, service accessors - auth: {is: Function, args: 1}, + auth: { is: Function, args: 1 } }; diff --git a/tests/package/utils/definitions/database.ts b/tests/package/utils/definitions/database.ts index 638ac29f8fc..1c31f0f565b 100644 --- a/tests/package/utils/definitions/database.ts +++ b/tests/package/utils/definitions/database.ts @@ -23,41 +23,41 @@ import { NamespaceSpec } from '../namespace'; // Database Service namespace // export const databaseSpec: NamespaceSpec = { - Database: {is: Function}, - Reference: {is: Function}, - Query: {is: Function}, - enableLogging: {is: Function, args: 2}, + Database: { is: Function }, + Reference: { is: Function }, 
+ Query: { is: Function }, + enableLogging: { is: Function, args: 2 }, ServerValue: { - TIMESTAMP: {is: Object} - }, + TIMESTAMP: { is: Object } + } }; // // Database Service instance // export const databaseInstanceSpec: NamespaceSpec = { - app: {is: Object, isName: 'App'}, + app: { is: Object, isName: 'App' }, INTERNAL: { - delete: {is: Function} + delete: { is: Function } }, - ref: {is: Function, args: 1}, - refFromURL: {is: Function, args: 1}, - goOnline: {is: Function, args: 0}, - goOffline: {is: Function, args: 0}, + ref: { is: Function, args: 1 }, + refFromURL: { is: Function, args: 1 }, + goOnline: { is: Function, args: 0 }, + goOffline: { is: Function, args: 0 } }; // Incremental properties on firebase namespace. export const firebaseSpec: NamespaceSpec = { INTERNAL: { factories: { - database: {is: Function}, - }, + database: { is: Function } + } }, - database: {is: Function, args: 1}, + database: { is: Function, args: 1 } }; // Incremental properties on App instance. export const appInstanceSpec: NamespaceSpec = { - database: {is: Function, args: 1}, + database: { is: Function, args: 1 } }; diff --git a/tests/package/utils/definitions/firebase.ts b/tests/package/utils/definitions/firebase.ts index b756545d0f5..1d1e181a303 100644 --- a/tests/package/utils/definitions/firebase.ts +++ b/tests/package/utils/definitions/firebase.ts @@ -23,44 +23,47 @@ import { NamespaceSpec } from '../namespace'; // firebase namespace. 
// export const firebaseSpec: NamespaceSpec = { - initializeApp: {is: Function, args: 2}, + initializeApp: { is: Function, args: 2 }, // // App namespace and accessor // app: { is: Function, args: 1, - App: {is: Function}, + App: { is: Function } }, - SDK_VERSION: {is: String}, - apps: {is: Array}, + SDK_VERSION: { is: String }, + apps: { is: Array }, Promise: { is: Function, - resolve: {is: Function}, - reject: {is: Function}, - all: {is: Function}, - prototype: {then: {is: Function, args: 2}, catch: {is: Function, args: 1}}, + resolve: { is: Function }, + reject: { is: Function }, + all: { is: Function }, + prototype: { + then: { is: Function, args: 2 }, + catch: { is: Function, args: 1 } + } }, INTERNAL: { - registerService: {is: Function, args: 5}, - extendNamespace: {is: Function, args: 1}, - createFirebaseNamespace: {is: Function, args: 0}, - createSubscribe: {is: Function, args: 2}, - removeApp: {is: Function, args: 1}, - factories: {is: Object}, - ErrorFactory: {is: Function, args: 3}, - deepExtend: {is: Function, args: 2}, + registerService: { is: Function, args: 5 }, + extendNamespace: { is: Function, args: 1 }, + createFirebaseNamespace: { is: Function, args: 0 }, + createSubscribe: { is: Function, args: 2 }, + removeApp: { is: Function, args: 1 }, + factories: { is: Object }, + ErrorFactory: { is: Function, args: 3 }, + deepExtend: { is: Function, args: 2 }, // goog.Promise implementation (Browser only) Promise: { is: Function, - resolve: {is: Function}, - reject: {is: Function}, - all: {is: Function}, + resolve: { is: Function }, + reject: { is: Function }, + all: { is: Function }, prototype: { // goog.Promise adds extra context argment to these methods - then: {is: Function, args: 2}, - catch: {is: Function, args: 1} - }, + then: { is: Function, args: 2 }, + catch: { is: Function, args: 1 } + } } - }, -}; \ No newline at end of file + } +}; diff --git a/tests/package/utils/definitions/messaging.ts b/tests/package/utils/definitions/messaging.ts index 
e24b1c0ca00..4b111454d2a 100644 --- a/tests/package/utils/definitions/messaging.ts +++ b/tests/package/utils/definitions/messaging.ts @@ -30,48 +30,48 @@ export const messagingSpec: NamespaceSpec = { // Messaging Service instance // export const messagingInstanceSpec: NamespaceSpec = { - app: {is: Object, isName: 'App'}, + app: { is: Object, isName: 'App' }, INTERNAL: { - delete: {is: Function} + delete: { is: Function } }, - getToken: {is: Function, args: 0}, - onMessage: {is: Function, args: 3}, + getToken: { is: Function, args: 0 }, + onMessage: { is: Function, args: 3 }, - onTokenRefresh: {is: Function, args: 3}, - requestPermission: {is: Function, args: 0}, - deleteToken: {is: Function, args: 1}, + onTokenRefresh: { is: Function, args: 3 }, + requestPermission: { is: Function, args: 0 }, + deleteToken: { is: Function, args: 1 }, - setBackgroundMessageHandler: {is: Function, args: 1}, + setBackgroundMessageHandler: { is: Function, args: 1 } }; export const compiledMessagingInstanceSpec: NamespaceSpec = { - app: {is: Object, isName: 'App'}, + app: { is: Object, isName: 'App' }, INTERNAL: { - delete: {is: Function} + delete: { is: Function } }, - getToken: {is: Function, args: 0}, - onMessage: {is: Function, args: 3}, + getToken: { is: Function, args: 0 }, + onMessage: { is: Function, args: 3 }, - onTokenRefresh: {is: Function, args: 3}, - requestPermission: {is: Function, args: 0}, - deleteToken: {is: Function, args: 1}, + onTokenRefresh: { is: Function, args: 3 }, + requestPermission: { is: Function, args: 0 }, + deleteToken: { is: Function, args: 1 }, - setBackgroundMessageHandler: {is: Function, args: 0}, + setBackgroundMessageHandler: { is: Function, args: 0 } }; // Incremental properties on firebase namespace. 
export const firebaseSpec: NamespaceSpec = { INTERNAL: { factories: { - messaging: {is: Function}, - }, + messaging: { is: Function } + } }, - messaging: {is: Function, args: 1}, + messaging: { is: Function, args: 1 } }; // Incremental properties on firebase App instance. export const appInstanceSpec: NamespaceSpec = { - messaging: {is: Function, args: 1}, + messaging: { is: Function, args: 1 } }; diff --git a/tests/package/utils/definitions/storage.ts b/tests/package/utils/definitions/storage.ts index 2cd63f2f4b6..68a71f5107d 100644 --- a/tests/package/utils/definitions/storage.ts +++ b/tests/package/utils/definitions/storage.ts @@ -23,51 +23,51 @@ import { NamespaceSpec } from '../namespace'; // Storage Service namespace // export const storageSpec: NamespaceSpec = { - Storage: {is: Function}, - Reference: {is: Function}, + Storage: { is: Function }, + Reference: { is: Function }, // Enums: TaskEvent: { - STATE_CHANGED: {is: String}, + STATE_CHANGED: { is: String } }, TaskState: { - RUNNING: {is: String}, - PAUSED: {is: String}, - SUCCESS: {is: String}, - CANCELED: {is: String}, - ERROR: {is: String}, + RUNNING: { is: String }, + PAUSED: { is: String }, + SUCCESS: { is: String }, + CANCELED: { is: String }, + ERROR: { is: String } }, StringFormat: { - RAW: {is: String}, - BASE64: {is: String}, - BASE64URL: {is: String}, - DATA_URL: {is: String}, - }, + RAW: { is: String }, + BASE64: { is: String }, + BASE64URL: { is: String }, + DATA_URL: { is: String } + } }; // // Storage Service instance // export const storageInstanceSpec: NamespaceSpec = { - app: {is: Object, isName: 'App'}, + app: { is: Object, isName: 'App' }, INTERNAL: { - delete: {is: Function} + delete: { is: Function } }, - ref: {is: Function, args: 1}, - refFromURL: {is: Function, args: 1}, + ref: { is: Function, args: 1 }, + refFromURL: { is: Function, args: 1 } }; // Incremental properties on firebase namespace. 
export const firebaseSpec: NamespaceSpec = { INTERNAL: { factories: { - storage: {is: Function}, - }, + storage: { is: Function } + } }, - storage: {is: Function, args: 1}, + storage: { is: Function, args: 1 } }; // Incremental properties on firebase App instance. export const appInstanceSpec: NamespaceSpec = { - storage: {is: Function, args: 1}, + storage: { is: Function, args: 1 } }; diff --git a/tests/package/utils/validator.ts b/tests/package/utils/validator.ts index 10dc58711e1..49e40caa673 100644 --- a/tests/package/utils/validator.ts +++ b/tests/package/utils/validator.ts @@ -14,8 +14,8 @@ * limitations under the License. */ -import { NamespaceSpec, PropertySpec } from "./namespace"; -import { assert } from "chai"; +import { NamespaceSpec, PropertySpec } from './namespace'; +import { assert } from 'chai'; // Recursively check all the properties of the spec against // the object. @@ -29,12 +29,20 @@ export function checkProps(name: string, obj: any, spec: NamespaceSpec): void { if (propSpec.is !== undefined) { let name = propSpec.isName || propSpec.is.name; - let instanceOfCheck = obj instanceof propSpec.is || obj.constructor === propSpec.is; - assert.ok(instanceOfCheck, `expected ${name} but found ${(obj.constructor.name || (obj + ''))}`); + let instanceOfCheck = + obj instanceof propSpec.is || obj.constructor === propSpec.is; + assert.ok( + instanceOfCheck, + `expected ${name} but found ${obj.constructor.name || obj + ''}` + ); } if (propSpec.args !== undefined) { - assert.equal(obj.length, propSpec.args, `${name} takes ${propSpec.args} arguments, passed ${obj.length}`); + assert.equal( + obj.length, + propSpec.args, + `${name} takes ${propSpec.args} arguments, passed ${obj.length}` + ); } for (let prop in spec) { @@ -42,7 +50,10 @@ export function checkProps(name: string, obj: any, spec: NamespaceSpec): void { continue; } - checkProps(name + (name !== '' ? '.' : '') + prop, - obj[prop], spec[prop] as NamespaceSpec); + checkProps( + name + (name !== '' ? 
'.' : '') + prop, + obj[prop], + spec[prop] as NamespaceSpec + ); } -} \ No newline at end of file +} diff --git a/tests/storage/browser/blob_test.ts b/tests/storage/browser/blob_test.ts index 69cf548678d..888dd824e3c 100644 --- a/tests/storage/browser/blob_test.ts +++ b/tests/storage/browser/blob_test.ts @@ -14,13 +14,13 @@ * limitations under the License. */ -import {assert} from 'chai'; +import { assert } from 'chai'; import * as sinon from 'sinon'; -import {FbsBlob} from '../../../src/storage/implementation/blob'; +import { FbsBlob } from '../../../src/storage/implementation/blob'; import * as type from '../../../src/storage/implementation/type'; import * as testShared from './testshared'; -describe("Firebase Storage > Blob", () => { +describe('Firebase Storage > Blob', () => { let stubs = []; before(() => { const definedStub = sinon.stub(type, 'isNativeBlobDefined'); @@ -28,7 +28,7 @@ describe("Firebase Storage > Blob", () => { stubs.push(definedStub); const blobStub = sinon.stub(window, 'Blob'); - blobStub.throws(Error('I don\'t exist')); + blobStub.throws(Error("I don't exist")); stubs.push(blobStub); }); after(() => { @@ -38,19 +38,38 @@ describe("Firebase Storage > Blob", () => { stubs = []; }); - it("Slicing works", () => { + it('Slicing works', () => { const blob = new FbsBlob(new Uint8Array([1, 2, 3, 4, 5, 6, 7])); const sliced = blob.slice(1, 5); - testShared.assertUint8ArrayEquals(sliced.uploadData() as Uint8Array, new Uint8Array([2, 3, 4, 5])); + testShared.assertUint8ArrayEquals( + sliced.uploadData() as Uint8Array, + new Uint8Array([2, 3, 4, 5]) + ); }); - it("Blobs are merged with strings correctly", () => { + it('Blobs are merged with strings correctly', () => { const blob = new FbsBlob(new Uint8Array([1, 2, 3, 4])); const merged = FbsBlob.getBlob('what', blob, '\ud83d\ude0a '); - testShared.assertUint8ArrayEquals(merged.uploadData() as Uint8Array, - new Uint8Array([0x77, 0x68, 0x61, 0x74, 0x1, 0x2, 0x3, 0x4, 0xF0, 0x9F, 0x98, 0x8A, 0x20])); + 
testShared.assertUint8ArrayEquals( + merged.uploadData() as Uint8Array, + new Uint8Array([ + 0x77, + 0x68, + 0x61, + 0x74, + 0x1, + 0x2, + 0x3, + 0x4, + 0xf0, + 0x9f, + 0x98, + 0x8a, + 0x20 + ]) + ); }); - it("Respects windowed views of ArrayBuffers when merging", () => { + it('Respects windowed views of ArrayBuffers when merging', () => { const buf = new ArrayBuffer(100); const arr1 = new Uint8Array(buf, 0, 10); const arr2 = new Uint8Array(buf, 10, 10); diff --git a/tests/storage/browser/reference_test.ts b/tests/storage/browser/reference_test.ts index 8a8f92d3ab1..548367a26b7 100644 --- a/tests/storage/browser/reference_test.ts +++ b/tests/storage/browser/reference_test.ts @@ -13,37 +13,44 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import {assert} from 'chai'; -import {FirebaseApp} from '../../../src/app/firebase_app'; -import {AuthWrapper} from '../../../src/storage/implementation/authwrapper'; -import {makeRequest} from '../../../src/storage/implementation/request'; -import {StringFormat} from '../../../src/storage/implementation/string'; -import {Headers} from '../../../src/storage/implementation/xhrio'; -import {Metadata} from '../../../src/storage/metadata'; -import {Reference} from '../../../src/storage/reference'; -import {Service} from '../../../src/storage/service'; +import { assert } from 'chai'; +import { FirebaseApp } from '../../../src/app/firebase_app'; +import { AuthWrapper } from '../../../src/storage/implementation/authwrapper'; +import { makeRequest } from '../../../src/storage/implementation/request'; +import { StringFormat } from '../../../src/storage/implementation/string'; +import { Headers } from '../../../src/storage/implementation/xhrio'; +import { Metadata } from '../../../src/storage/metadata'; +import { Reference } from '../../../src/storage/reference'; +import { Service } from '../../../src/storage/service'; import * as testShared from './testshared'; -import 
{SendHook, TestingXhrIo} from './xhrio'; +import { SendHook, TestingXhrIo } from './xhrio'; function makeFakeService(app: FirebaseApp, sendHook: SendHook): Service { return new Service(app, testShared.makePool(sendHook)); } function makeStorage(url: string) { - function maker(wrapper, loc) { return ({} as any) as Reference; } + function maker(wrapper, loc) { + return ({} as any) as Reference; + } const authWrapper = new AuthWrapper( - null, maker, makeRequest, ({} as any) as Service, testShared.makePool(null)); + null, + maker, + makeRequest, + ({} as any) as Service, + testShared.makePool(null) + ); return new Reference(authWrapper, url); } -describe("Firebase Storage > Reference", () => { +describe('Firebase Storage > Reference', () => { const root = makeStorage('gs://test-bucket/'); const child = makeStorage('gs://test-bucket/hello'); - describe("Path constructor", () => { - it("root", () => { + describe('Path constructor', () => { + it('root', () => { assert.equal(root.toString(), 'gs://test-bucket/'); }); - it("keeps characters after ? on a gs:// string", () => { + it('keeps characters after ? 
on a gs:// string', () => { const s = makeStorage('gs://test-bucket/this/ismyobject?hello'); assert.equal(s.toString(), 'gs://test-bucket/this/ismyobject?hello'); }); @@ -51,126 +58,149 @@ describe("Firebase Storage > Reference", () => { const s = makeStorage('gs://test-bucket/%3F'); assert.equal(s.toString(), 'gs://test-bucket/%3F'); }); - it("ignores URL params and fragments on an http URL", () => { + it('ignores URL params and fragments on an http URL', () => { const s = makeStorage( - 'http://firebasestorage.googleapis.com/v0/b/test-bucket/o/my/object.txt' - + '?ignoreme#please'); + 'http://firebasestorage.googleapis.com/v0/b/test-bucket/o/my/object.txt' + + '?ignoreme#please' + ); assert.equal(s.toString(), 'gs://test-bucket/my/object.txt'); }); - it("URL-decodes and ignores fragment on an http URL", () => { + it('URL-decodes and ignores fragment on an http URL', () => { const s = makeStorage( - 'http://firebasestorage.googleapis.com/v0/b/test-bucket/o/%3F?ignore'); + 'http://firebasestorage.googleapis.com/v0/b/test-bucket/o/%3F?ignore' + ); assert.equal(s.toString(), 'gs://test-bucket/?'); }); - it("ignores URL params and fragments on an https URL", () => { + it('ignores URL params and fragments on an https URL', () => { const s = makeStorage( - 'https://firebasestorage.googleapis.com/v0/b/test-bucket/o/my/object.txt' - + '?ignoreme#please'); + 'https://firebasestorage.googleapis.com/v0/b/test-bucket/o/my/object.txt' + + '?ignoreme#please' + ); assert.equal(s.toString(), 'gs://test-bucket/my/object.txt'); }); - it("URL-decodes and ignores fragment on an https URL", () => { + it('URL-decodes and ignores fragment on an https URL', () => { const s = makeStorage( - 'https://firebasestorage.googleapis.com/v0/b/test-bucket/o/%3F?ignore'); + 'https://firebasestorage.googleapis.com/v0/b/test-bucket/o/%3F?ignore' + ); assert.equal(s.toString(), 'gs://test-bucket/?'); }); - }); - describe("toString", () => { + describe('toString', () => { it("Doesn't add trailing 
slash", () => { const s = makeStorage('gs://test-bucket/foo'); assert.equal(s.toString(), 'gs://test-bucket/foo'); }); - it("Strips trailing slash", () => { + it('Strips trailing slash', () => { const s = makeStorage('gs://test-bucket/foo/'); assert.equal(s.toString(), 'gs://test-bucket/foo'); }); }); - describe("parent", () => { - it("Returns null at root", () => { + describe('parent', () => { + it('Returns null at root', () => { assert.isNull(root.parent); }); - it("Returns root one level down", () => { + it('Returns root one level down', () => { assert.equal(child.parent.toString(), 'gs://test-bucket/'); }); - it("Works correctly with empty levels", () => { + it('Works correctly with empty levels', () => { const s = makeStorage('gs://test-bucket/a///'); assert.equal(s.parent.toString(), 'gs://test-bucket/a/'); }); }); - describe("root", () => { - it("Returns self at root", () => { + describe('root', () => { + it('Returns self at root', () => { assert.equal(root.root.toString(), 'gs://test-bucket/'); }); - it("Returns root multiple levels down", () => { + it('Returns root multiple levels down', () => { const s = makeStorage('gs://test-bucket/a/b/c/d'); assert.equal(s.root.toString(), 'gs://test-bucket/'); }); }); - describe("bucket", () => { - it("Returns bucket name", () => { + describe('bucket', () => { + it('Returns bucket name', () => { assert.equal(root.bucket, 'test-bucket'); }); }); - describe("fullPath", () => { - it("Returns full path without leading slash", () => { + describe('fullPath', () => { + it('Returns full path without leading slash', () => { const s = makeStorage('gs://test-bucket/full/path'); assert.equal(s.fullPath, 'full/path'); }); }); - describe("name", () => { - it("Works at top level", () => { + describe('name', () => { + it('Works at top level', () => { const s = makeStorage('gs://test-bucket/toplevel.txt'); assert.equal(s.name, 'toplevel.txt'); }); - it("Works at not the top level", () => { + it('Works at not the top level', () => { 
const s = makeStorage('gs://test-bucket/not/toplevel.txt'); assert.equal('toplevel.txt', s.name); }); }); - describe("child", () => { - it("works with a simple string", () => { + describe('child', () => { + it('works with a simple string', () => { assert.equal(root.child('a').toString(), 'gs://test-bucket/a'); }); - it("drops a trailing slash", () => { + it('drops a trailing slash', () => { assert.equal(root.child('ab/').toString(), 'gs://test-bucket/ab'); }); - it("compresses repeated slashes", () => { - assert.equal(root.child('//a///b/////').toString(), 'gs://test-bucket/a/b'); + it('compresses repeated slashes', () => { + assert.equal( + root.child('//a///b/////').toString(), + 'gs://test-bucket/a/b' + ); }); - it("works chained multiple times with leading slashes", () => { - assert.equal(root.child('a').child('/b').child('c').child('d/e').toString(), - 'gs://test-bucket/a/b/c/d/e'); + it('works chained multiple times with leading slashes', () => { + assert.equal( + root.child('a').child('/b').child('c').child('d/e').toString(), + 'gs://test-bucket/a/b/c/d/e' + ); }); }); - it("Doesn't send Authorization on null auth token", (done) => { - function newSend(xhrio: TestingXhrIo, url: string, method: string, body?: ArrayBufferView|Blob|string|null, headers?: Headers) { + it("Doesn't send Authorization on null auth token", done => { + function newSend( + xhrio: TestingXhrIo, + url: string, + method: string, + body?: ArrayBufferView | Blob | string | null, + headers?: Headers + ) { assert.isDefined(headers); assert.isUndefined(headers['Authorization']); done(); - } + } const service = makeFakeService(testShared.fakeAppNoAuth, newSend); const ref = service.refFromURL('gs://test-bucket'); ref.child('foo').getMetadata(); }); - it("Works if the user logs in before creating the storage reference", (done) => { + it('Works if the user logs in before creating the storage reference', done => { // Regression test for b/27227221 - function newSend(xhrio: TestingXhrIo, url: 
string, method: string, body?: ArrayBufferView|Blob|string|null, headers?: Headers) { + function newSend( + xhrio: TestingXhrIo, + url: string, + method: string, + body?: ArrayBufferView | Blob | string | null, + headers?: Headers + ) { assert.isDefined(headers); - assert.equal(headers['Authorization'], 'Firebase ' + testShared.authToken); + assert.equal( + headers['Authorization'], + 'Firebase ' + testShared.authToken + ); done(); } @@ -179,59 +209,92 @@ describe("Firebase Storage > Reference", () => { ref.child('foo').getMetadata(); }); - describe("putString", () => { - it("Uses metadata.contentType for RAW format", () => { + describe('putString', () => { + it('Uses metadata.contentType for RAW format', () => { // Regression test for b/30989476 - const task = child.putString('hello', StringFormat.RAW, {'contentType': 'lol/wut'} as Metadata); + const task = child.putString( + 'hello', + StringFormat.RAW, + { contentType: 'lol/wut' } as Metadata + ); assert.equal(task.snapshot.metadata.contentType, 'lol/wut'); task.cancel(); }); - it("Uses embedded content type in DATA_URL format", () => { - const task = child.putString('data:lol/wat;base64,aaaa', StringFormat.DATA_URL); + it('Uses embedded content type in DATA_URL format', () => { + const task = child.putString( + 'data:lol/wat;base64,aaaa', + StringFormat.DATA_URL + ); assert.equal(task.snapshot.metadata.contentType, 'lol/wat'); task.cancel(); }); - it("Lets metadata.contentType override embedded content type in DATA_URL format", () => { + it('Lets metadata.contentType override embedded content type in DATA_URL format', () => { const task = child.putString( - 'data:ignore/me;base64,aaaa', StringFormat.DATA_URL, - {'contentType': 'tomato/soup'} as Metadata); + 'data:ignore/me;base64,aaaa', + StringFormat.DATA_URL, + { contentType: 'tomato/soup' } as Metadata + ); assert.equal(task.snapshot.metadata.contentType, 'tomato/soup'); task.cancel(); }); }); - describe("Argument verification", () => { - describe("child", 
() => { - it("throws on no args", () => { - testShared.assertThrows(testShared.bind(root.child, root), 'storage/invalid-argument-count'); + describe('Argument verification', () => { + describe('child', () => { + it('throws on no args', () => { + testShared.assertThrows( + testShared.bind(root.child, root), + 'storage/invalid-argument-count' + ); }); - it("throws on null instead of path", () => { - testShared.assertThrows(testShared.bind(root.child, root, null), 'storage/invalid-argument'); + it('throws on null instead of path', () => { + testShared.assertThrows( + testShared.bind(root.child, root, null), + 'storage/invalid-argument' + ); }); - it("throws on number instead of path", () => { - testShared.assertThrows(testShared.bind(root.child, root, 3), 'storage/invalid-argument'); + it('throws on number instead of path', () => { + testShared.assertThrows( + testShared.bind(root.child, root, 3), + 'storage/invalid-argument' + ); }); }); - describe("toString", () => { - it("throws on number arg", () => { - testShared.assertThrows(testShared.bind(root.toString, root, 3), 'storage/invalid-argument-count'); + describe('toString', () => { + it('throws on number arg', () => { + testShared.assertThrows( + testShared.bind(root.toString, root, 3), + 'storage/invalid-argument-count' + ); }); }); - describe("put", () => { + describe('put', () => { const blob = new Blob(['a']); - it("throws on no arguments", () => { - testShared.assertThrows(testShared.bind(child.put, child), 'storage/invalid-argument-count'); + it('throws on no arguments', () => { + testShared.assertThrows( + testShared.bind(child.put, child), + 'storage/invalid-argument-count' + ); }); - it("throws on number instead of metadata", () => { - testShared.assertThrows(testShared.bind(child.put, child, new Blob([]), 3), 'storage/invalid-argument'); + it('throws on number instead of metadata', () => { + testShared.assertThrows( + testShared.bind(child.put, child, new Blob([]), 3), + 'storage/invalid-argument' + ); 
}); - it("throws on number instead of data", () => { - testShared.assertThrows(testShared.bind(child.put, child, 3), 'storage/invalid-argument'); + it('throws on number instead of data', () => { + testShared.assertThrows( + testShared.bind(child.put, child, 3), + 'storage/invalid-argument' + ); }); - it("throws null instead of data", () => { - testShared.assertThrows(testShared.bind(child.put, child, null), 'storage/invalid-argument'); + it('throws null instead of data', () => { + testShared.assertThrows( + testShared.bind(child.put, child, null), + 'storage/invalid-argument' + ); }); it("doesn't throw on good metadata", () => { const goodMetadata = { @@ -240,14 +303,14 @@ describe("Firebase Storage > Reference", () => { contentDisposition: 'legit', contentEncoding: 'identity', contentLanguage: 'en', - contentType: 'text/legit', + contentType: 'text/legit' }; assert.doesNotThrow(() => { const task = child.put(blob, goodMetadata as Metadata); task.cancel(); }); }); - it("throws when customMetadata is a string instead of an object", () => { + it('throws when customMetadata is a string instead of an object', () => { const badCustomMetadata = { md5Hash: 'a', cacheControl: 'done', @@ -257,68 +320,109 @@ describe("Firebase Storage > Reference", () => { contentType: 'text/legit', customMetadata: 'yo' }; - testShared.assertThrows(testShared.bind(child.put, child, blob, badCustomMetadata), 'storage/invalid-argument'); + testShared.assertThrows( + testShared.bind(child.put, child, blob, badCustomMetadata), + 'storage/invalid-argument' + ); }); - it("throws when object is supplied instead of string", () => { + it('throws when object is supplied instead of string', () => { const objectInsteadOfStringInMetadata = { - md5Hash: {'real': 'hash'}, + md5Hash: { real: 'hash' }, cacheControl: 'done', contentDisposition: 'legit', contentEncoding: 'identity', contentLanguage: 'en', - contentType: 'text/legit', + contentType: 'text/legit' }; - 
testShared.assertThrows(testShared.bind(child.put, child, blob, objectInsteadOfStringInMetadata), 'storage/invalid-argument'); + testShared.assertThrows( + testShared.bind( + child.put, + child, + blob, + objectInsteadOfStringInMetadata + ), + 'storage/invalid-argument' + ); }); }); - describe("putString", () => { - it("throws on no arguments", () => { - testShared.assertThrows(testShared.bind(child.putString, child), 'storage/invalid-argument-count'); + describe('putString', () => { + it('throws on no arguments', () => { + testShared.assertThrows( + testShared.bind(child.putString, child), + 'storage/invalid-argument-count' + ); }); - it("throws on invalid format", () => { - testShared.assertThrows(testShared.bind(child.putString, child, 'raw', 'notaformat'), 'storage/invalid-argument'); + it('throws on invalid format', () => { + testShared.assertThrows( + testShared.bind(child.putString, child, 'raw', 'notaformat'), + 'storage/invalid-argument' + ); }); - it("throws on number instead of string", () => { - testShared.assertThrows(testShared.bind(child.putString, child, 3, StringFormat.RAW), 'storage/invalid-argument'); + it('throws on number instead of string', () => { + testShared.assertThrows( + testShared.bind(child.putString, child, 3, StringFormat.RAW), + 'storage/invalid-argument' + ); }); - it("throws on invalid metadata", () => { - testShared.assertThrows(testShared.bind(child.putString, child, 'raw', StringFormat.RAW, 3), 'storage/invalid-argument'); + it('throws on invalid metadata', () => { + testShared.assertThrows( + testShared.bind(child.putString, child, 'raw', StringFormat.RAW, 3), + 'storage/invalid-argument' + ); }); }); - describe("delete", () => { - it("throws on a number arg", () => { - testShared.assertThrows(testShared.bind(child.delete, child, 3), 'storage/invalid-argument-count'); + describe('delete', () => { + it('throws on a number arg', () => { + testShared.assertThrows( + testShared.bind(child.delete, child, 3), + 
'storage/invalid-argument-count' + ); }); }); - describe("getMetadata", () => { - it("throws on a number arg", () => { - testShared.assertThrows(testShared.bind(child.getMetadata, child, 3), 'storage/invalid-argument-count'); + describe('getMetadata', () => { + it('throws on a number arg', () => { + testShared.assertThrows( + testShared.bind(child.getMetadata, child, 3), + 'storage/invalid-argument-count' + ); }); }); - describe("updateMetadata", () => { - it("throws on no args", () => { - testShared.assertThrows(testShared.bind(child.updateMetadata, child), 'storage/invalid-argument-count'); + describe('updateMetadata', () => { + it('throws on no args', () => { + testShared.assertThrows( + testShared.bind(child.updateMetadata, child), + 'storage/invalid-argument-count' + ); }); - it("throws on number arg", () => { - testShared.assertThrows(testShared.bind(child.updateMetadata, child, 3), 'storage/invalid-argument'); + it('throws on number arg', () => { + testShared.assertThrows( + testShared.bind(child.updateMetadata, child, 3), + 'storage/invalid-argument' + ); }); - it("throws on null arg", () => { - testShared.assertThrows(testShared.bind(child.updateMetadata, child, null), 'storage/invalid-argument'); + it('throws on null arg', () => { + testShared.assertThrows( + testShared.bind(child.updateMetadata, child, null), + 'storage/invalid-argument' + ); }); }); - describe("getDownloadURL", () => { - it("throws on number arg", () => { - testShared.assertThrows(testShared.bind(child.getDownloadURL, child, 3), 'storage/invalid-argument-count'); + describe('getDownloadURL', () => { + it('throws on number arg', () => { + testShared.assertThrows( + testShared.bind(child.getDownloadURL, child, 3), + 'storage/invalid-argument-count' + ); }); }); }); - describe("non-root operations", () => { + describe('non-root operations', () => { it("put doesn't throw", () => { assert.doesNotThrow(() => { child.put(new Blob(['a'])); @@ -331,7 +435,10 @@ describe("Firebase Storage > 
Reference", () => { child.putString('raw', StringFormat.RAW); child.putString('aaaa', StringFormat.BASE64); child.putString('aaaa', StringFormat.BASE64URL); - child.putString('data:application/octet-stream;base64,aaaa', StringFormat.DATA_URL); + child.putString( + 'data:application/octet-stream;base64,aaaa', + StringFormat.DATA_URL + ); }); }); it("delete doesn't throw", () => { @@ -356,24 +463,42 @@ describe("Firebase Storage > Reference", () => { }); }); - describe("root operations", () => { - it("put throws", () => { - testShared.assertThrows(root.put.bind(root, new Blob(['a'])), 'storage/invalid-root-operation'); - }); - it("putString throws", () => { - testShared.assertThrows(root.putString.bind(root, 'raw', StringFormat.RAW), 'storage/invalid-root-operation'); - }); - it("delete throws", () => { - testShared.assertThrows(root.delete.bind(root), 'storage/invalid-root-operation'); - }); - it("getMetadata throws", () => { - testShared.assertThrows(root.getMetadata.bind(root), 'storage/invalid-root-operation'); - }); - it("updateMetadata throws", () => { - testShared.assertThrows(root.updateMetadata.bind(root, {}), 'storage/invalid-root-operation'); - }); - it("getDownloadURL throws", () => { - testShared.assertThrows(root.getDownloadURL.bind(root), 'storage/invalid-root-operation'); + describe('root operations', () => { + it('put throws', () => { + testShared.assertThrows( + root.put.bind(root, new Blob(['a'])), + 'storage/invalid-root-operation' + ); + }); + it('putString throws', () => { + testShared.assertThrows( + root.putString.bind(root, 'raw', StringFormat.RAW), + 'storage/invalid-root-operation' + ); + }); + it('delete throws', () => { + testShared.assertThrows( + root.delete.bind(root), + 'storage/invalid-root-operation' + ); + }); + it('getMetadata throws', () => { + testShared.assertThrows( + root.getMetadata.bind(root), + 'storage/invalid-root-operation' + ); + }); + it('updateMetadata throws', () => { + testShared.assertThrows( + 
root.updateMetadata.bind(root, {}), + 'storage/invalid-root-operation' + ); + }); + it('getDownloadURL throws', () => { + testShared.assertThrows( + root.getDownloadURL.bind(root), + 'storage/invalid-root-operation' + ); }); }); }); diff --git a/tests/storage/browser/request_test.ts b/tests/storage/browser/request_test.ts index 46bcc5ef8e6..7770f275f33 100644 --- a/tests/storage/browser/request_test.ts +++ b/tests/storage/browser/request_test.ts @@ -13,23 +13,23 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import {assert} from 'chai'; +import { assert } from 'chai'; import * as sinon from 'sinon'; -import {FirebaseNamespace} from '../../../src/app/firebase_app'; -import {makeRequest} from '../../../src/storage/implementation/request'; -import {RequestInfo} from '../../../src/storage/implementation/requestinfo'; -import {Headers, XhrIo} from '../../../src/storage/implementation/xhrio'; -import {makePool} from './testshared'; -import {TestingXhrIo} from './xhrio'; +import { FirebaseNamespace } from '../../../src/app/firebase_app'; +import { makeRequest } from '../../../src/storage/implementation/request'; +import { RequestInfo } from '../../../src/storage/implementation/requestinfo'; +import { Headers, XhrIo } from '../../../src/storage/implementation/xhrio'; +import { makePool } from './testshared'; +import { TestingXhrIo } from './xhrio'; declare var firebase: FirebaseNamespace; -describe("Firebase Storage > Request", () => { +describe('Firebase Storage > Request', () => { const versionHeaderName = 'X-Firebase-Storage-Version'; const versionHeaderValue = 'webjs/' + firebase.SDK_VERSION; const timeout = 60 * 1000; - it("Simple success request works", () => { + it('Simple success request works', () => { const url = 'http://my-url.com/'; const method = 'GET'; @@ -38,7 +38,13 @@ describe("Firebase Storage > Request", () => { const responseValue = 'ResponseValue1'; const response = 'I am the server 
response!!!!'; - function newSend(xhrio: TestingXhrIo, url: string, method: string, body?: ArrayBufferView|Blob|string|null, headers?: Headers) { + function newSend( + xhrio: TestingXhrIo, + url: string, + method: string, + body?: ArrayBufferView | Blob | string | null, + headers?: Headers + ) { const responseHeaders = {}; responseHeaders[responseHeader] = responseValue; xhrio.simulateResponse(status, response, responseHeaders); @@ -60,8 +66,9 @@ describe("Firebase Storage > Request", () => { requestInfo.successCodes = [200, 234]; return makeRequest(requestInfo, null, makePool(spiedSend)) - .getPromise() - .then(result => { + .getPromise() + .then( + result => { assert.equal(result, response); assert.isTrue(spiedSend.calledOnce); @@ -72,18 +79,28 @@ describe("Firebase Storage > Request", () => { expectedHeaders[requestHeader] = requestValue; expectedHeaders[versionHeaderName] = versionHeaderValue; assert.deepEqual(args[4], expectedHeaders); - }, error => { + }, + error => { assert.fail('Errored in successful call...'); - }); + } + ); }); - it("URL parameters get encoded correctly", () => { - function newSend(xhrio: TestingXhrIo, url: string, method: string, body?: ArrayBufferView|Blob|string|null, headers?: Headers) { + it('URL parameters get encoded correctly', () => { + function newSend( + xhrio: TestingXhrIo, + url: string, + method: string, + body?: ArrayBufferView | Blob | string | null, + headers?: Headers + ) { xhrio.simulateResponse(200, '', {}); } const spiedSend = sinon.spy(newSend); - function handler(xhr: XhrIo, text: string): string { return text; } + function handler(xhr: XhrIo, text: string): string { + return text; + } const url = 'http://my-url.com/'; const method = 'DELETE'; @@ -97,70 +114,121 @@ describe("Firebase Storage > Request", () => { requestInfo.urlParams[p2] = v2; requestInfo.body = 'thisistherequestbody'; return makeRequest(requestInfo, null, makePool(spiedSend)) - .getPromise() - .then(result => { + .getPromise() + .then( + result => { 
assert.isTrue(spiedSend.calledOnce); - const fullUrl = url + '?' + encodeURIComponent(p1) + '=' + - encodeURIComponent(v1) + '&' + encodeURIComponent(p2) + '=' + - encodeURIComponent(v2); + const fullUrl = + url + + '?' + + encodeURIComponent(p1) + + '=' + + encodeURIComponent(v1) + + '&' + + encodeURIComponent(p2) + + '=' + + encodeURIComponent(v2); const args = spiedSend.getCall(0).args; assert.equal(args[1], fullUrl); assert.equal(args[2], method); assert.equal(args[3], requestInfo.body); - }, error => { + }, + error => { assert.fail('Request failed unexpectedly'); - }); + } + ); }); - it("Propagates errors acceptably", () => { - function newSend(xhrio: TestingXhrIo, url: string, method: string, body?: ArrayBufferView|Blob|string|null, headers?: Headers) { + it('Propagates errors acceptably', () => { + function newSend( + xhrio: TestingXhrIo, + url: string, + method: string, + body?: ArrayBufferView | Blob | string | null, + headers?: Headers + ) { xhrio.simulateResponse(200, '', {}); } const errorMessage = 'Catch me if you can'; - function handler(xhr: XhrIo, text: string): string { throw new Error(errorMessage); } - const requestInfo = new RequestInfo('http://my-url.com/', 'GET', handler, timeout); - - return makeRequest(requestInfo, null, makePool(newSend)) - .getPromise() - .then(result => { - assert.fail('Succeeded when handler gave error'); - }, error => { - assert.equal(error.message, errorMessage); - }); + function handler(xhr: XhrIo, text: string): string { + throw new Error(errorMessage); + } + const requestInfo = new RequestInfo( + 'http://my-url.com/', + 'GET', + handler, + timeout + ); + + return makeRequest(requestInfo, null, makePool(newSend)).getPromise().then( + result => { + assert.fail('Succeeded when handler gave error'); + }, + error => { + assert.equal(error.message, errorMessage); + } + ); }); - it("Cancels properly", () => { - function handler(xhr: XhrIo, text: string): boolean { return true; } - const requestInfo = new 
RequestInfo('http://my-url.com/', 'GET', handler, timeout); + it('Cancels properly', () => { + function handler(xhr: XhrIo, text: string): boolean { + return true; + } + const requestInfo = new RequestInfo( + 'http://my-url.com/', + 'GET', + handler, + timeout + ); const request = makeRequest(requestInfo, null, makePool(null)); const promise = request.getPromise().then( - result => { assert.fail('Succeeded when handler gave error'); }, - error => { return true; }); + result => { + assert.fail('Succeeded when handler gave error'); + }, + error => { + return true; + } + ); request.cancel(); return promise; }); - it("Sends auth tokens along properly", () => { - function newSend(xhrio: TestingXhrIo, url: string, method: string, body?: ArrayBufferView|Blob|string|null, headers?: Headers) { + it('Sends auth tokens along properly', () => { + function newSend( + xhrio: TestingXhrIo, + url: string, + method: string, + body?: ArrayBufferView | Blob | string | null, + headers?: Headers + ) { xhrio.simulateResponse(200, '', {}); } const spiedSend = sinon.spy(newSend); const authToken = 'totallyLegitAuthToken'; - function handler(xhr: XhrIo, text: string): boolean { return true; } - const requestInfo = new RequestInfo('http://my-url.com/', 'GET', handler, timeout); + function handler(xhr: XhrIo, text: string): boolean { + return true; + } + const requestInfo = new RequestInfo( + 'http://my-url.com/', + 'GET', + handler, + timeout + ); const request = makeRequest(requestInfo, authToken, makePool(spiedSend)); return request.getPromise().then( - result => { - assert.isTrue(spiedSend.calledOnce); - const args = spiedSend.getCall(0).args; - const expectedHeaders = {'Authorization': 'Firebase ' + authToken}; - expectedHeaders[versionHeaderName] = versionHeaderValue; - assert.deepEqual(args[4], expectedHeaders); - }, error => { - assert.fail('Request failed unexpectedly'); - }); + result => { + assert.isTrue(spiedSend.calledOnce); + const args = spiedSend.getCall(0).args; + const 
expectedHeaders = { Authorization: 'Firebase ' + authToken }; + expectedHeaders[versionHeaderName] = versionHeaderValue; + assert.deepEqual(args[4], expectedHeaders); + }, + error => { + assert.fail('Request failed unexpectedly'); + } + ); }); }); diff --git a/tests/storage/browser/requests_test.ts b/tests/storage/browser/requests_test.ts index dcf5bed8fcb..08d810010fc 100644 --- a/tests/storage/browser/requests_test.ts +++ b/tests/storage/browser/requests_test.ts @@ -13,25 +13,30 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import {assert} from 'chai'; -import {AuthWrapper} from '../../../src/storage/implementation/authwrapper'; -import {FbsBlob} from '../../../src/storage/implementation/blob'; -import {Location} from '../../../src/storage/implementation/location'; -import {fromResourceString, getMappings} from '../../../src/storage/implementation/metadata'; -import {makeRequest} from '../../../src/storage/implementation/request'; +import { assert } from 'chai'; +import { AuthWrapper } from '../../../src/storage/implementation/authwrapper'; +import { FbsBlob } from '../../../src/storage/implementation/blob'; +import { Location } from '../../../src/storage/implementation/location'; +import { + fromResourceString, + getMappings +} from '../../../src/storage/implementation/metadata'; +import { makeRequest } from '../../../src/storage/implementation/request'; import * as requests from '../../../src/storage/implementation/requests'; -import {makeNormalUrl, makeUploadUrl} from '../../../src/storage/implementation/url'; +import { + makeNormalUrl, + makeUploadUrl +} from '../../../src/storage/implementation/url'; import * as fbsPromise from '../../../src/storage/implementation/promise_external'; import * as errors from '../../../src/storage/implementation/error'; -import {RequestInfo} from '../../../src/storage/implementation/requestinfo'; -import {XhrIoPool} from 
'../../../src/storage/implementation/xhriopool'; -import {Metadata} from '../../../src/storage/metadata'; -import {Reference} from '../../../src/storage/reference'; -import {Service} from '../../../src/storage/service'; -import {assertObjectIncludes, fakeXhrIo} from './testshared'; - -describe("Firebase Storage > Requests", () => { - +import { RequestInfo } from '../../../src/storage/implementation/requestinfo'; +import { XhrIoPool } from '../../../src/storage/implementation/xhriopool'; +import { Metadata } from '../../../src/storage/metadata'; +import { Reference } from '../../../src/storage/reference'; +import { Service } from '../../../src/storage/service'; +import { assertObjectIncludes, fakeXhrIo } from './testshared'; + +describe('Firebase Storage > Requests', () => { const normalBucket = 'b'; const locationNormal = new Location(normalBucket, 'o'); const locationNormalUrl = '/b/' + normalBucket + '/o/o'; @@ -47,56 +52,65 @@ describe("Firebase Storage > Requests", () => { const mappings = getMappings(); - const authWrapper = new AuthWrapper(null, function(authWrapper, loc) { - return {} as Reference; - }, makeRequest, {} as Service, new XhrIoPool()); + const authWrapper = new AuthWrapper( + null, + function(authWrapper, loc) { + return {} as Reference; + }, + makeRequest, + {} as Service, + new XhrIoPool() + ); const contentTypeInMetadata = 'application/jason'; const metadata = ({ - 'contentType': contentTypeInMetadata, - 'customMetadata': { + contentType: contentTypeInMetadata, + customMetadata: { // no-inline - 'foo': 'bar' + foo: 'bar' } } as any) as Metadata; const metadataString = JSON.stringify({ // no-inline - 'contentType': contentTypeInMetadata, - 'metadata': { + contentType: contentTypeInMetadata, + metadata: { // no-inline - 'foo': 'bar' + foo: 'bar' } }); const serverResource = { - 'bucket': normalBucket, - 'generation': '1', - 'metageneration': '2', + bucket: normalBucket, + generation: '1', + metageneration: '2', - 'name': 'foo/bar/baz.png', + 
name: 'foo/bar/baz.png', - 'size': '10', - 'timeCreated': 'This is a real time', - 'updated': 'Also a real time', - 'md5Hash': 'deadbeef', + size: '10', + timeCreated: 'This is a real time', + updated: 'Also a real time', + md5Hash: 'deadbeef', - 'cacheControl': 'max-age=604800', - 'contentDisposition': 'Attachment; filename=baz.png', - 'contentLanguage': 'en-US', - 'contentType': 'application/jason', + cacheControl: 'max-age=604800', + contentDisposition: 'Attachment; filename=baz.png', + contentLanguage: 'en-US', + contentType: 'application/jason', - 'downloadTokens': 'a,b,c', - 'metadata': {'foo': 'bar'} + downloadTokens: 'a,b,c', + metadata: { foo: 'bar' } }; const serverResourceString = JSON.stringify(serverResource); const metadataFromServerResource = fromResourceString( - authWrapper, serverResourceString, mappings); + authWrapper, + serverResourceString, + mappings + ); function uploadMetadataString(name: string): string { return JSON.stringify({ - 'name': name, - 'contentType': contentTypeInMetadata, - 'metadata': {'foo': 'bar'} + name: name, + contentType: contentTypeInMetadata, + metadata: { foo: 'bar' } }); } @@ -115,7 +129,10 @@ describe("Firebase Storage > Requests", () => { }); } - function assertBodyEquals(body: Blob|string|Uint8Array|null, expectedStr: string): Promise { + function assertBodyEquals( + body: Blob | string | Uint8Array | null, + expectedStr: string + ): Promise { if (body === null) { assert.fail('body was null'); } @@ -135,8 +152,7 @@ describe("Firebase Storage > Requests", () => { } function checkMetadataHandler(requestInfo: RequestInfo): void { - const metadata = - requestInfo.handler(fakeXhrIo({}), serverResourceString); + const metadata = requestInfo.handler(fakeXhrIo({}), serverResourceString); assert.deepEqual(metadata, metadataFromServerResource); } @@ -148,80 +164,97 @@ describe("Firebase Storage > Requests", () => { } } - it("getMetadata request info", () => { + it('getMetadata request info', () => { const maps = [ - 
[locationNormal, locationNormalUrl], [locationEscapes, locationEscapesUrl] + [locationNormal, locationNormalUrl], + [locationEscapes, locationEscapesUrl] ]; for (let i = 0; i < maps.length; i++) { const location = maps[i][0] as Location; const url = maps[i][1] as string; const requestInfo = requests.getMetadata(authWrapper, location, mappings); assertObjectIncludes( - { - url: normalUrl(url), - method: 'GET', - body: null, - headers: {}, - urlParams: {} - }, - requestInfo); + { + url: normalUrl(url), + method: 'GET', + body: null, + headers: {}, + urlParams: {} + }, + requestInfo + ); } }); - it("getMetadata handler", () => { - const requestInfo = - requests.getMetadata(authWrapper, locationNormal, mappings); + it('getMetadata handler', () => { + const requestInfo = requests.getMetadata( + authWrapper, + locationNormal, + mappings + ); checkMetadataHandler(requestInfo); }); - it("updateMetadata requestinfo", () => { + it('updateMetadata requestinfo', () => { const maps = [ - [locationNormal, locationNormalUrl], [locationEscapes, locationEscapesUrl] + [locationNormal, locationNormalUrl], + [locationEscapes, locationEscapesUrl] ]; for (let i = 0; i < maps.length; i++) { const location = maps[i][0] as Location; const url = maps[i][1] as string; - const requestInfo = - requests.updateMetadata(authWrapper, location, metadata, mappings); + const requestInfo = requests.updateMetadata( + authWrapper, + location, + metadata, + mappings + ); assertObjectIncludes( - { - url: normalUrl(url), - method: 'PATCH', - body: metadataString, - headers: {'Content-Type': metadataContentType}, - urlParams: {} - }, - requestInfo); + { + url: normalUrl(url), + method: 'PATCH', + body: metadataString, + headers: { 'Content-Type': metadataContentType }, + urlParams: {} + }, + requestInfo + ); } }); - it("updateMetadata handler", () => { + it('updateMetadata handler', () => { const requestInfo = requests.updateMetadata( - authWrapper, locationNormal, metadata, mappings); + authWrapper, + 
locationNormal, + metadata, + mappings + ); checkMetadataHandler(requestInfo); }); - it("deleteObject request info", () => { + it('deleteObject request info', () => { const maps = [ - [locationNormal, locationNormalUrl], [locationEscapes, locationEscapesUrl] + [locationNormal, locationNormalUrl], + [locationEscapes, locationEscapesUrl] ]; for (let i = 0; i < maps.length; i++) { const location = maps[i][0] as Location; const url = maps[i][1] as string; const requestInfo = requests.deleteObject(authWrapper, location); assertObjectIncludes( - { - url: normalUrl(url), - method: 'DELETE', - body: null, - headers: {}, - urlParams: {} - }, - requestInfo); + { + url: normalUrl(url), + method: 'DELETE', + body: null, + headers: {}, + urlParams: {} + }, + requestInfo + ); } }); - it("deleteObject handler", () => { + it('deleteObject handler', () => { const requestInfo = requests.deleteObject(authWrapper, locationNormal); checkNoOpHandler(requestInfo); }); - it("multipartUpload request info", () => { + it('multipartUpload request info', () => { const multipartHeaderRegex = /^multipart\/related; boundary=([A-Za-z0-9]+)$/; const maps = [ @@ -233,46 +266,73 @@ describe("Firebase Storage > Requests", () => { const location = maps[i][0] as Location; const url = maps[i][1] as string; const makeMultipartBodyString = (boundary: string): string => { - return '--' + boundary + '\r\n' + - 'Content-Type: ' + metadataContentType + '\r\n\r\n' + - uploadMetadataString(location.path) + '\r\n--' + boundary + '\r\n' + - 'Content-Type: ' + contentTypeInMetadata + '\r\n\r\n' + - smallBlobString + '\r\n--' + boundary + '--'; + return ( + '--' + + boundary + + '\r\n' + + 'Content-Type: ' + + metadataContentType + + '\r\n\r\n' + + uploadMetadataString(location.path) + + '\r\n--' + + boundary + + '\r\n' + + 'Content-Type: ' + + contentTypeInMetadata + + '\r\n\r\n' + + smallBlobString + + '\r\n--' + + boundary + + '--' + ); }; const requestInfo = requests.multipartUpload( - authWrapper, location, 
mappings, smallBlob, metadata); - const matches = - (requestInfo.headers['Content-Type'] as string).match(multipartHeaderRegex); + authWrapper, + location, + mappings, + smallBlob, + metadata + ); + const matches = (requestInfo.headers['Content-Type'] as string).match( + multipartHeaderRegex + ); assert.isNotNull(matches); assert.equal(matches.length, 2); const boundary = matches[1]; promises.push( - assertBodyEquals(requestInfo.body, makeMultipartBodyString(boundary))); + assertBodyEquals(requestInfo.body, makeMultipartBodyString(boundary)) + ); assertObjectIncludes( - { - url: uploadUrl(url), - method: 'POST', - urlParams: {'name': location.path}, - headers: { - 'X-Goog-Upload-Protocol': 'multipart', - // Checked before this block, but needed here because - // assertObjectIncludes does exact checks on values. - 'Content-Type': requestInfo.headers['Content-Type'] - } - }, - requestInfo); + { + url: uploadUrl(url), + method: 'POST', + urlParams: { name: location.path }, + headers: { + 'X-Goog-Upload-Protocol': 'multipart', + // Checked before this block, but needed here because + // assertObjectIncludes does exact checks on values. 
+ 'Content-Type': requestInfo.headers['Content-Type'] + } + }, + requestInfo + ); } return Promise.all(promises); }); - it("multipartUpload handler", () => { + it('multipartUpload handler', () => { const requestInfo = requests.multipartUpload( - authWrapper, locationNormal, mappings, smallBlob, metadata); + authWrapper, + locationNormal, + mappings, + smallBlob, + metadata + ); checkMetadataHandler(requestInfo); }); - it("createResumableUpload request info", () => { + it('createResumableUpload request info', () => { const maps = [ [locationNormal, locationNormalNoObjUrl], [locationEscapes, locationEscapesNoObjUrl] @@ -282,143 +342,212 @@ describe("Firebase Storage > Requests", () => { const location = maps[i][0] as Location; const url = maps[i][1] as string; const requestInfo = requests.createResumableUpload( - authWrapper, location, mappings, smallBlob, metadata); + authWrapper, + location, + mappings, + smallBlob, + metadata + ); assertObjectIncludes( - { - url: uploadUrl(url), - method: 'POST', - urlParams: {'name': location.path}, - headers: { - 'X-Goog-Upload-Protocol': 'resumable', - 'X-Goog-Upload-Command': 'start', - 'X-Goog-Upload-Header-Content-Length': smallBlob.size(), - 'X-Goog-Upload-Header-Content-Type': contentTypeInMetadata, - 'Content-Type': metadataContentType - } - }, - requestInfo); + { + url: uploadUrl(url), + method: 'POST', + urlParams: { name: location.path }, + headers: { + 'X-Goog-Upload-Protocol': 'resumable', + 'X-Goog-Upload-Command': 'start', + 'X-Goog-Upload-Header-Content-Length': smallBlob.size(), + 'X-Goog-Upload-Header-Content-Type': contentTypeInMetadata, + 'Content-Type': metadataContentType + } + }, + requestInfo + ); promises.push( - assertBodyEquals(requestInfo.body, uploadMetadataString(location.path))); + assertBodyEquals(requestInfo.body, uploadMetadataString(location.path)) + ); } return Promise.all(promises); }); function testCreateResumableUploadHandler() { const requestInfo = requests.createResumableUpload( - 
authWrapper, locationNormal, mappings, smallBlob, - metadata); + authWrapper, + locationNormal, + mappings, + smallBlob, + metadata + ); const uploadUrl = 'https://i.am.an.upload.url.com/hello/there'; const handlerUrl = requestInfo.handler( - fakeXhrIo( - {'X-Goog-Upload-Status': 'active', 'X-Goog-Upload-URL': uploadUrl}), - ''); + fakeXhrIo({ + 'X-Goog-Upload-Status': 'active', + 'X-Goog-Upload-URL': uploadUrl + }), + '' + ); assert.equal(handlerUrl, uploadUrl); } - it("getResumableUploadStatus request info", () => { - const url = 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; + it('getResumableUploadStatus request info', () => { + const url = + 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; const requestInfo = requests.getResumableUploadStatus( - authWrapper, locationNormal, url, smallBlob); + authWrapper, + locationNormal, + url, + smallBlob + ); assertObjectIncludes( - { - url: url, - method: 'POST', - urlParams: {}, - headers: {'X-Goog-Upload-Command': 'query'} - }, - requestInfo); + { + url: url, + method: 'POST', + urlParams: {}, + headers: { 'X-Goog-Upload-Command': 'query' } + }, + requestInfo + ); }); - describe("getResumableUploadStatus handler", () => { - const url = 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; + describe('getResumableUploadStatus handler', () => { + const url = + 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; const requestInfo = requests.getResumableUploadStatus( - authWrapper, locationNormal, url, smallBlob); + authWrapper, + locationNormal, + url, + smallBlob + ); let status = requestInfo.handler( - fakeXhrIo({ - 'X-Goog-Upload-Status': 'active', - 'X-Goog-Upload-Size-Received': '0' - }), - ''); - let expectedStatus = - new requests.ResumableUploadStatus(0, smallBlob.size(), false); + fakeXhrIo({ + 'X-Goog-Upload-Status': 'active', + 'X-Goog-Upload-Size-Received': '0' + }), + '' + ); + let expectedStatus = new requests.ResumableUploadStatus( + 0, + 
smallBlob.size(), + false + ); assert.deepEqual(status, expectedStatus); status = requestInfo.handler( - fakeXhrIo({ - 'X-Goog-Upload-Status': 'final', - 'X-Goog-Upload-Size-Received': '' + smallBlob.size() - }), - ''); + fakeXhrIo({ + 'X-Goog-Upload-Status': 'final', + 'X-Goog-Upload-Size-Received': '' + smallBlob.size() + }), + '' + ); expectedStatus = new requests.ResumableUploadStatus( - smallBlob.size(), smallBlob.size(), true); + smallBlob.size(), + smallBlob.size(), + true + ); assert.deepEqual(status, expectedStatus); }); - it("continueResumableUpload request info", () => { - const url = 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; + it('continueResumableUpload request info', () => { + const url = + 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; const requestInfo = requests.continueResumableUpload( - locationNormal, authWrapper, url, smallBlob, - requests.resumableUploadChunkSize, mappings); + locationNormal, + authWrapper, + url, + smallBlob, + requests.resumableUploadChunkSize, + mappings + ); assertObjectIncludes( - { - url: url, - method: 'POST', - urlParams: {}, - headers: { - 'X-Goog-Upload-Command': 'upload, finalize', - 'X-Goog-Upload-Offset': 0 - } - }, - requestInfo); + { + url: url, + method: 'POST', + urlParams: {}, + headers: { + 'X-Goog-Upload-Command': 'upload, finalize', + 'X-Goog-Upload-Offset': 0 + } + }, + requestInfo + ); return assertBodyEquals(requestInfo.body, smallBlobString); }); - it("continueResumableUpload handler", () => { - const url = 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; + it('continueResumableUpload handler', () => { + const url = + 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; const chunkSize = requests.resumableUploadChunkSize; assert.isTrue(smallBlob.size() < chunkSize); let requestInfo = requests.continueResumableUpload( - locationNormal, authWrapper, url, smallBlob, chunkSize, mappings); + locationNormal, + authWrapper, + 
url, + smallBlob, + chunkSize, + mappings + ); let status = requestInfo.handler( - fakeXhrIo({'X-Goog-Upload-Status': 'final'}), - serverResourceString); + fakeXhrIo({ 'X-Goog-Upload-Status': 'final' }), + serverResourceString + ); let expectedStatus = new requests.ResumableUploadStatus( - smallBlob.size(), smallBlob.size(), true, metadataFromServerResource); + smallBlob.size(), + smallBlob.size(), + true, + metadataFromServerResource + ); assert.deepEqual(status, expectedStatus); assert.isTrue(bigBlob.size() > chunkSize); requestInfo = requests.continueResumableUpload( - locationNormal, authWrapper, url, bigBlob, chunkSize, mappings); + locationNormal, + authWrapper, + url, + bigBlob, + chunkSize, + mappings + ); status = requestInfo.handler( - fakeXhrIo({'X-Goog-Upload-Status': 'active'}), ''); - expectedStatus = - new requests.ResumableUploadStatus(chunkSize, bigBlob.size(), false); + fakeXhrIo({ 'X-Goog-Upload-Status': 'active' }), + '' + ); + expectedStatus = new requests.ResumableUploadStatus( + chunkSize, + bigBlob.size(), + false + ); assert.deepEqual(status, expectedStatus); }); - it("error handler passes through unknown errors", () => { - const requestInfo = - requests.getMetadata(authWrapper, locationNormal, mappings); + it('error handler passes through unknown errors', () => { + const requestInfo = requests.getMetadata( + authWrapper, + locationNormal, + mappings + ); const error = errors.unknown(); - const resultError = - requestInfo.errorHandler(fakeXhrIo({}, 509), error); + const resultError = requestInfo.errorHandler(fakeXhrIo({}, 509), error); assert.equal(resultError, error); }); - it("error handler converts 404 to not found", () => { - const requestInfo = - requests.getMetadata(authWrapper, locationNormal, mappings); + it('error handler converts 404 to not found', () => { + const requestInfo = requests.getMetadata( + authWrapper, + locationNormal, + mappings + ); const error = errors.unknown(); - const resultError = - 
requestInfo.errorHandler(fakeXhrIo({}, 404), error); + const resultError = requestInfo.errorHandler(fakeXhrIo({}, 404), error); assert.isTrue(resultError.codeEquals(errors.Code.OBJECT_NOT_FOUND)); }); - it("error handler converts 402 to quota exceeded", () => { - const requestInfo = - requests.getMetadata(authWrapper, locationNormal, mappings); + it('error handler converts 402 to quota exceeded', () => { + const requestInfo = requests.getMetadata( + authWrapper, + locationNormal, + mappings + ); const error = errors.unknown(); - const resultError = - requestInfo.errorHandler(fakeXhrIo({}, 402), error); + const resultError = requestInfo.errorHandler(fakeXhrIo({}, 402), error); assert.isTrue(resultError.codeEquals(errors.Code.QUOTA_EXCEEDED)); }); }); diff --git a/tests/storage/browser/service_test.ts b/tests/storage/browser/service_test.ts index 075f78c51bc..f0c82b6da40 100644 --- a/tests/storage/browser/service_test.ts +++ b/tests/storage/browser/service_test.ts @@ -13,10 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {assert} from 'chai'; -import {TaskEvent} from '../../../src/storage/implementation/taskenums'; -import {XhrIoPool} from '../../../src/storage/implementation/xhriopool'; -import {Service} from '../../../src/storage/service'; +import { assert } from 'chai'; +import { TaskEvent } from '../../../src/storage/implementation/taskenums'; +import { XhrIoPool } from '../../../src/storage/implementation/xhriopool'; +import { Service } from '../../../src/storage/service'; import * as testShared from './testshared'; const fakeAppGs = testShared.makeFakeApp(null, 'gs://mybucket'); @@ -28,234 +28,288 @@ function makeGsUrl(child: string = ''): string { return 'gs://' + testShared.bucket + '/' + child; } -describe("Firebase Storage > Service", () => { - describe("simple constructor", () => { +describe('Firebase Storage > Service', () => { + describe('simple constructor', () => { const service = new Service(testShared.fakeApp, xhrIoPool); - it("Root refs point to the right place", () => { + it('Root refs point to the right place', () => { const ref = service.ref(); assert.equal(ref.toString(), makeGsUrl()); }); - it("Child refs point to the right place", () => { + it('Child refs point to the right place', () => { const ref = service.ref('path/to/child'); assert.equal(ref.toString(), makeGsUrl('path/to/child')); }); - it("Throws calling ref with a gs:// URL", () => { - const error = testShared.assertThrows(() => { service.ref('gs://bucket/object'); }, 'storage/invalid-argument'); + it('Throws calling ref with a gs:// URL', () => { + const error = testShared.assertThrows(() => { + service.ref('gs://bucket/object'); + }, 'storage/invalid-argument'); assert.match(error.message, /refFromURL/); }); - it("Throws calling ref with an http:// URL", () => { + it('Throws calling ref with an http:// URL', () => { const error = testShared.assertThrows(() => { service.ref('http://firebasestorage.googleapis.com/etc'); - }, 'storage/invalid-argument'); + }, 'storage/invalid-argument'); 
assert.match(error.message, /refFromURL/); }); - it("Throws calling ref with an https:// URL", () => { + it('Throws calling ref with an https:// URL', () => { const error = testShared.assertThrows(() => { service.ref('https://firebasestorage.googleapis.com/etc'); }, 'storage/invalid-argument'); assert.match(error.message, /refFromURL/); }); }); - describe("custom bucket constructor", () => { - it("gs:// custom bucket constructor refs point to the right place", () => { - const service = new Service(testShared.fakeApp, xhrIoPool, 'gs://foo-bar.appspot.com'); + describe('custom bucket constructor', () => { + it('gs:// custom bucket constructor refs point to the right place', () => { + const service = new Service( + testShared.fakeApp, + xhrIoPool, + 'gs://foo-bar.appspot.com' + ); const ref = service.ref(); assert.equal(ref.toString(), 'gs://foo-bar.appspot.com/'); }); - it("http:// custom bucket constructor refs point to the right place", () => { - const service = new Service(testShared.fakeApp, xhrIoPool, - 'http://firebasestorage.googleapis.com/v1/b/foo-bar.appspot.com/o'); + it('http:// custom bucket constructor refs point to the right place', () => { + const service = new Service( + testShared.fakeApp, + xhrIoPool, + 'http://firebasestorage.googleapis.com/v1/b/foo-bar.appspot.com/o' + ); const ref = service.ref(); assert.equal(ref.toString(), 'gs://foo-bar.appspot.com/'); }); - it("https:// custom bucket constructor refs point to the right place", () => { - const service = new Service(testShared.fakeApp, xhrIoPool, - 'https://firebasestorage.googleapis.com/v1/b/foo-bar.appspot.com/o'); + it('https:// custom bucket constructor refs point to the right place', () => { + const service = new Service( + testShared.fakeApp, + xhrIoPool, + 'https://firebasestorage.googleapis.com/v1/b/foo-bar.appspot.com/o' + ); const ref = service.ref(); assert.equal(ref.toString(), 'gs://foo-bar.appspot.com/'); }); - it("Bare bucket name constructor refs point to the right place", () => 
{ - const service = new Service(testShared.fakeApp, xhrIoPool, 'foo-bar.appspot.com'); + it('Bare bucket name constructor refs point to the right place', () => { + const service = new Service( + testShared.fakeApp, + xhrIoPool, + 'foo-bar.appspot.com' + ); const ref = service.ref(); assert.equal(ref.toString(), 'gs://foo-bar.appspot.com/'); }); - it("Child refs point to the right place", () => { - const service = new Service(testShared.fakeApp, xhrIoPool, 'foo-bar.appspot.com'); + it('Child refs point to the right place', () => { + const service = new Service( + testShared.fakeApp, + xhrIoPool, + 'foo-bar.appspot.com' + ); const ref = service.ref('path/to/child'); assert.equal(ref.toString(), 'gs://foo-bar.appspot.com/path/to/child'); }); - it("Throws trying to construct with a gs:// URL containing an object path", () => { + it('Throws trying to construct with a gs:// URL containing an object path', () => { const error = testShared.assertThrows(() => { - new Service( - testShared.fakeApp, xhrIoPool, 'gs://bucket/object/'); + new Service(testShared.fakeApp, xhrIoPool, 'gs://bucket/object/'); }, 'storage/invalid-default-bucket'); assert.match(error.message, /Invalid default bucket/); }); }); - describe("default bucket config", () => { - it("gs:// works without ending slash", () => { + describe('default bucket config', () => { + it('gs:// works without ending slash', () => { const service = new Service(fakeAppGs, xhrIoPool); assert.equal(service.ref().toString(), 'gs://mybucket/'); }); - it("gs:// works with ending slash", () => { + it('gs:// works with ending slash', () => { const service = new Service(fakeAppGsEndingSlash, xhrIoPool); assert.equal(service.ref().toString(), 'gs://mybucket/'); }); - it("Throws when config bucket is gs:// with an object path", () => { + it('Throws when config bucket is gs:// with an object path', () => { const error = testShared.assertThrows(() => { new Service(fakeAppInvalidGs, xhrIoPool); }, 'storage/invalid-default-bucket'); }); }); 
- describe("refFromURL", () => { + describe('refFromURL', () => { const service = new Service(testShared.fakeApp, xhrIoPool); - it("Throws on non-URL arg", () => { - const error = testShared.assertThrows(() => { service.refFromURL('path/to/child'); }, 'storage/invalid-argument'); + it('Throws on non-URL arg', () => { + const error = testShared.assertThrows(() => { + service.refFromURL('path/to/child'); + }, 'storage/invalid-argument'); assert.match(error.message, /invalid/i); }); - it("Works with gs:// URLs", () => { + it('Works with gs:// URLs', () => { const ref = service.refFromURL('gs://mybucket/child/path/image.png'); assert.equal(ref.toString(), 'gs://mybucket/child/path/image.png'); }); - it("Works with http:// URLs", () => { + it('Works with http:// URLs', () => { const ref = service.refFromURL( - 'http://firebasestorage.googleapis.com/v0/b/' + - 'mybucket/o/child%2Fpath%2Fimage.png?downloadToken=hello'); + 'http://firebasestorage.googleapis.com/v0/b/' + + 'mybucket/o/child%2Fpath%2Fimage.png?downloadToken=hello' + ); assert.equal(ref.toString(), 'gs://mybucket/child/path/image.png'); }); - it("Works with https:// URLs", () => { + it('Works with https:// URLs', () => { const ref = service.refFromURL( - 'https://firebasestorage.googleapis.com/v0/b/' + - 'mybucket/o/child%2Fpath%2Fimage.png?downloadToken=hello'); + 'https://firebasestorage.googleapis.com/v0/b/' + + 'mybucket/o/child%2Fpath%2Fimage.png?downloadToken=hello' + ); assert.equal(ref.toString(), 'gs://mybucket/child/path/image.png'); }); }); - describe("Argument verification", () => { + describe('Argument verification', () => { const service = new Service(testShared.fakeApp, xhrIoPool); - describe("ref", () => { - it("Throws with two args", () => { + describe('ref', () => { + it('Throws with two args', () => { testShared.assertThrows( - testShared.bind(service.ref, service, 1, 2), - 'storage/invalid-argument-count'); + testShared.bind(service.ref, service, 1, 2), + 'storage/invalid-argument-count' + 
); }); - it("Throws on gs:// argument", () => { + it('Throws on gs:// argument', () => { testShared.assertThrows( - testShared.bind(service.ref, service, 'gs://yo'), - 'storage/invalid-argument'); + testShared.bind(service.ref, service, 'gs://yo'), + 'storage/invalid-argument' + ); }); - it("Throws on number argument", () => { + it('Throws on number argument', () => { testShared.assertThrows( - testShared.bind(service.ref, service, 3), 'storage/invalid-argument'); + testShared.bind(service.ref, service, 3), + 'storage/invalid-argument' + ); }); - it("Throws on null argument", () => { + it('Throws on null argument', () => { testShared.assertThrows( - testShared.bind(service.ref, service, null), 'storage/invalid-argument'); + testShared.bind(service.ref, service, null), + 'storage/invalid-argument' + ); }); }); - describe("refFromURL", () => { - it("Throws with no args", () => { + describe('refFromURL', () => { + it('Throws with no args', () => { testShared.assertThrows( - testShared.bind(service.refFromURL, service), - 'storage/invalid-argument-count'); + testShared.bind(service.refFromURL, service), + 'storage/invalid-argument-count' + ); }); - it("Throws with two args", () => { + it('Throws with two args', () => { testShared.assertThrows( - testShared.bind(service.refFromURL, service, 'a', 'b'), - 'storage/invalid-argument-count'); + testShared.bind(service.refFromURL, service, 'a', 'b'), + 'storage/invalid-argument-count' + ); }); - it("Throws with a non-URL string arg", () => { + it('Throws with a non-URL string arg', () => { testShared.assertThrows( - testShared.bind(service.refFromURL, service, 'child'), - 'storage/invalid-argument'); + testShared.bind(service.refFromURL, service, 'child'), + 'storage/invalid-argument' + ); }); - it("Throws with a null arg", () => { + it('Throws with a null arg', () => { testShared.assertThrows( - testShared.bind(service.refFromURL, service, null), - 'storage/invalid-argument'); + testShared.bind(service.refFromURL, service, 
null), + 'storage/invalid-argument' + ); }); - it("Throws with an invalid URL arg", () => { + it('Throws with an invalid URL arg', () => { testShared.assertThrows( - testShared.bind(service.refFromURL, service, 'notlegit://url'), - 'storage/invalid-argument'); + testShared.bind(service.refFromURL, service, 'notlegit://url'), + 'storage/invalid-argument' + ); }); }); - describe("setMaxUploadRetryTime", () => { - it("Throws on no args", () => { + describe('setMaxUploadRetryTime', () => { + it('Throws on no args', () => { testShared.assertThrows( - testShared.bind(service.setMaxUploadRetryTime, service), - 'storage/invalid-argument-count'); + testShared.bind(service.setMaxUploadRetryTime, service), + 'storage/invalid-argument-count' + ); }); - it("Throws on two args", () => { + it('Throws on two args', () => { testShared.assertThrows( - testShared.bind(service.setMaxUploadRetryTime, service, 1, 2), - 'storage/invalid-argument-count'); + testShared.bind(service.setMaxUploadRetryTime, service, 1, 2), + 'storage/invalid-argument-count' + ); }); - it("Throws on string arg", () => { + it('Throws on string arg', () => { testShared.assertThrows( - testShared.bind(service.setMaxUploadRetryTime, service, 'a'), - 'storage/invalid-argument'); + testShared.bind(service.setMaxUploadRetryTime, service, 'a'), + 'storage/invalid-argument' + ); }); - it("Throws on negative arg", () => { + it('Throws on negative arg', () => { testShared.assertThrows( - testShared.bind(service.setMaxUploadRetryTime, service, -10), - 'storage/invalid-argument'); + testShared.bind(service.setMaxUploadRetryTime, service, -10), + 'storage/invalid-argument' + ); }); }); - describe("setMaxOperationRetryTime", () => { - it("Throws on no args", () => { + describe('setMaxOperationRetryTime', () => { + it('Throws on no args', () => { testShared.assertThrows( - testShared.bind(service.setMaxOperationRetryTime, service), - 'storage/invalid-argument-count'); + testShared.bind(service.setMaxOperationRetryTime, 
service), + 'storage/invalid-argument-count' + ); }); - it("Throws on two args", () => { + it('Throws on two args', () => { testShared.assertThrows( - testShared.bind(service.setMaxOperationRetryTime, service, 1, 2), - 'storage/invalid-argument-count'); + testShared.bind(service.setMaxOperationRetryTime, service, 1, 2), + 'storage/invalid-argument-count' + ); }); - it("Throws on string arg", () => { + it('Throws on string arg', () => { testShared.assertThrows( - testShared.bind(service.setMaxOperationRetryTime, service, 'a'), - 'storage/invalid-argument'); + testShared.bind(service.setMaxOperationRetryTime, service, 'a'), + 'storage/invalid-argument' + ); }); - it("Throws on negative arg", () => { + it('Throws on negative arg', () => { testShared.assertThrows( - testShared.bind(service.setMaxOperationRetryTime, service, -10), - 'storage/invalid-argument'); + testShared.bind(service.setMaxOperationRetryTime, service, -10), + 'storage/invalid-argument' + ); }); }); }); - describe("Deletion", () => { + describe('Deletion', () => { const service = new Service(testShared.fakeApp, xhrIoPool); - it("In-flight requests are canceled when the service is deleted", () => { + it('In-flight requests are canceled when the service is deleted', () => { const ref = service.refFromURL('gs://mybucket/image.jpg'); const toReturn = ref.getMetadata().then( - met => { assert.fail('Promise succeeded, should have been canceled'); }, - err => { assert.equal(err.code, 'storage/app-deleted'); }); + met => { + assert.fail('Promise succeeded, should have been canceled'); + }, + err => { + assert.equal(err.code, 'storage/app-deleted'); + } + ); service.INTERNAL.delete(); return toReturn; }); - it("Requests fail when started after the service is deleted", () => { + it('Requests fail when started after the service is deleted', () => { const ref = service.refFromURL('gs://mybucket/image.jpg'); service.INTERNAL.delete(); const toReturn = ref.getMetadata().then( - met => { assert.fail('Promise 
succeeded, should have been canceled'); }, - err => { assert.equal(err.code, 'storage/app-deleted'); }); + met => { + assert.fail('Promise succeeded, should have been canceled'); + }, + err => { + assert.equal(err.code, 'storage/app-deleted'); + } + ); return toReturn; }); - it("Running uploads fail when the service is deleted", () => { + it('Running uploads fail when the service is deleted', () => { const ref = service.refFromURL('gs://mybucket/image.jpg'); const toReturn = new Promise(function(resolve, reject) { - ref.put(new Blob(['a'])) - .on(TaskEvent.STATE_CHANGED, null, - err => { - assert.equal(err.code, 'storage/app-deleted'); - resolve(); - }, - () => { - assert.fail('Upload completed, should have been canceled'); - }); + ref.put(new Blob(['a'])).on( + TaskEvent.STATE_CHANGED, + null, + err => { + assert.equal(err.code, 'storage/app-deleted'); + resolve(); + }, + () => { + assert.fail('Upload completed, should have been canceled'); + } + ); service.INTERNAL.delete(); }); return toReturn; diff --git a/tests/storage/browser/string_test.ts b/tests/storage/browser/string_test.ts index 84c24d97d01..c09c6fdc5d9 100644 --- a/tests/storage/browser/string_test.ts +++ b/tests/storage/browser/string_test.ts @@ -13,158 +13,275 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {assert} from 'chai'; -import {dataFromString, StringFormat} from '../../../src/storage/implementation/string'; -import {assertThrows, assertUint8ArrayEquals} from './testshared'; +import { assert } from 'chai'; +import { + dataFromString, + StringFormat +} from '../../../src/storage/implementation/string'; +import { assertThrows, assertUint8ArrayEquals } from './testshared'; -describe("Firebase Storage > String", () => { - it("Encodes raw strings with ascii correctly", () => { +describe('Firebase Storage > String', () => { + it('Encodes raw strings with ascii correctly', () => { const str = 'Hello, world!\n'; assertUint8ArrayEquals( - new Uint8Array([ - 0x48, 0x65, 0x6C, 0x6C, 0x6F, 0x2C, 0x20, 0x77, 0x6F, 0x72, 0x6C, 0x64, - 0x21, 0x0A - ]), - dataFromString(StringFormat.RAW, str).data); + new Uint8Array([ + 0x48, + 0x65, + 0x6c, + 0x6c, + 0x6f, + 0x2c, + 0x20, + 0x77, + 0x6f, + 0x72, + 0x6c, + 0x64, + 0x21, + 0x0a + ]), + dataFromString(StringFormat.RAW, str).data + ); }); - it("Encodes raw strings with 2-byte UTF8 codepoints correctly", () => { + it('Encodes raw strings with 2-byte UTF8 codepoints correctly', () => { const str = 'aa\u0089'; assertUint8ArrayEquals( - new Uint8Array([0x61, 0x61, 0xC2, 0x89]), - dataFromString(StringFormat.RAW, str).data); + new Uint8Array([0x61, 0x61, 0xc2, 0x89]), + dataFromString(StringFormat.RAW, str).data + ); }); - it("Encodes raw strings with 3-byte UTF8 codepoints correctly", () => { + it('Encodes raw strings with 3-byte UTF8 codepoints correctly', () => { const str = 'aa\uff7c'; assertUint8ArrayEquals( - new Uint8Array([0x61, 0x61, 0xEF, 0xBD, 0xBC]), - dataFromString(StringFormat.RAW, str).data); + new Uint8Array([0x61, 0x61, 0xef, 0xbd, 0xbc]), + dataFromString(StringFormat.RAW, str).data + ); }); - it("Encodes raw strings with 4-byte UTF8 codepoints correctly", () => { + it('Encodes raw strings with 4-byte UTF8 codepoints correctly', () => { const str = 'Hello! 
\ud83d\ude0a'; assertUint8ArrayEquals( - new Uint8Array( - [0x48, 0x65, 0x6C, 0x6C, 0x6F, 0x21, 0x20, 0xF0, 0x9F, 0x98, 0x8A]), - dataFromString(StringFormat.RAW, str).data); + new Uint8Array([ + 0x48, + 0x65, + 0x6c, + 0x6c, + 0x6f, + 0x21, + 0x20, + 0xf0, + 0x9f, + 0x98, + 0x8a + ]), + dataFromString(StringFormat.RAW, str).data + ); }); - it("Encodes raw strings with missing low surrogates correctly", () => { + it('Encodes raw strings with missing low surrogates correctly', () => { const str = 'aa\ud83d t'; assertUint8ArrayEquals( - new Uint8Array([0x61, 0x61, 0xEF, 0xBF, 0xBD, 0x20, 0x74]), - dataFromString(StringFormat.RAW, str).data); + new Uint8Array([0x61, 0x61, 0xef, 0xbf, 0xbd, 0x20, 0x74]), + dataFromString(StringFormat.RAW, str).data + ); }); - it("Encodes raw strings with missing high surrogates correctly", () => { + it('Encodes raw strings with missing high surrogates correctly', () => { const str = 'aa\udc3d t'; assertUint8ArrayEquals( - new Uint8Array([0x61, 0x61, 0xEF, 0xBF, 0xBD, 0x20, 0x74]), - dataFromString(StringFormat.RAW, str).data); + new Uint8Array([0x61, 0x61, 0xef, 0xbf, 0xbd, 0x20, 0x74]), + dataFromString(StringFormat.RAW, str).data + ); }); - it("Encodes base64 strings correctly", () => { + it('Encodes base64 strings correctly', () => { const str = 'CpYlM1+XsGxTd1n6izHMU/yY3Bw='; const base64Bytes = new Uint8Array([ - 0x0A, 0x96, 0x25, 0x33, 0x5F, 0x97, 0xB0, 0x6C, 0x53, 0x77, - 0x59, 0xFA, 0x8B, 0x31, 0xCC, 0x53, 0xFC, 0x98, 0xDC, 0x1C + 0x0a, + 0x96, + 0x25, + 0x33, + 0x5f, + 0x97, + 0xb0, + 0x6c, + 0x53, + 0x77, + 0x59, + 0xfa, + 0x8b, + 0x31, + 0xcc, + 0x53, + 0xfc, + 0x98, + 0xdc, + 0x1c ]); assertUint8ArrayEquals( - base64Bytes, - dataFromString(StringFormat.BASE64, str).data); + base64Bytes, + dataFromString(StringFormat.BASE64, str).data + ); }); - it("Encodes base64 strings without padding correctly", () => { + it('Encodes base64 strings without padding correctly', () => { const str = 'CpYlM1+XsGxTd1n6izHMU/yY3Bw'; const 
base64Bytes = new Uint8Array([ - 0x0A, 0x96, 0x25, 0x33, 0x5F, 0x97, 0xB0, 0x6C, 0x53, 0x77, - 0x59, 0xFA, 0x8B, 0x31, 0xCC, 0x53, 0xFC, 0x98, 0xDC, 0x1C + 0x0a, + 0x96, + 0x25, + 0x33, + 0x5f, + 0x97, + 0xb0, + 0x6c, + 0x53, + 0x77, + 0x59, + 0xfa, + 0x8b, + 0x31, + 0xcc, + 0x53, + 0xfc, + 0x98, + 0xdc, + 0x1c ]); assertUint8ArrayEquals( - base64Bytes, - dataFromString(StringFormat.BASE64, str).data); + base64Bytes, + dataFromString(StringFormat.BASE64, str).data + ); }); - it("Rejects invalid base64 strings", () => { + it('Rejects invalid base64 strings', () => { const str = 'CpYlM1-XsGxTd1n6izHMU_yY3Bw='; assertThrows(function() { dataFromString(StringFormat.BASE64, str); }, 'storage/invalid-format'); }); - it("Encodes base64url strings correctly", () => { + it('Encodes base64url strings correctly', () => { const str = 'CpYlM1-XsGxTd1n6izHMU_yY3Bw='; const base64Bytes = new Uint8Array([ - 0x0A, 0x96, 0x25, 0x33, 0x5F, 0x97, 0xB0, 0x6C, 0x53, 0x77, - 0x59, 0xFA, 0x8B, 0x31, 0xCC, 0x53, 0xFC, 0x98, 0xDC, 0x1C + 0x0a, + 0x96, + 0x25, + 0x33, + 0x5f, + 0x97, + 0xb0, + 0x6c, + 0x53, + 0x77, + 0x59, + 0xfa, + 0x8b, + 0x31, + 0xcc, + 0x53, + 0xfc, + 0x98, + 0xdc, + 0x1c ]); assertUint8ArrayEquals( - base64Bytes, - dataFromString(StringFormat.BASE64URL, str).data); + base64Bytes, + dataFromString(StringFormat.BASE64URL, str).data + ); }); - it("Encodes base64url strings without padding correctly", () => { + it('Encodes base64url strings without padding correctly', () => { const str = 'CpYlM1-XsGxTd1n6izHMU_yY3Bw'; const base64Bytes = new Uint8Array([ - 0x0A, 0x96, 0x25, 0x33, 0x5F, 0x97, 0xB0, 0x6C, 0x53, 0x77, - 0x59, 0xFA, 0x8B, 0x31, 0xCC, 0x53, 0xFC, 0x98, 0xDC, 0x1C + 0x0a, + 0x96, + 0x25, + 0x33, + 0x5f, + 0x97, + 0xb0, + 0x6c, + 0x53, + 0x77, + 0x59, + 0xfa, + 0x8b, + 0x31, + 0xcc, + 0x53, + 0xfc, + 0x98, + 0xdc, + 0x1c ]); assertUint8ArrayEquals( - base64Bytes, - dataFromString(StringFormat.BASE64URL, str).data); + base64Bytes, + 
dataFromString(StringFormat.BASE64URL, str).data + ); }); - it("Rejects invalid base64url strings", () => { + it('Rejects invalid base64url strings', () => { const str = 'CpYlM1+XsGxTd1n6izHMU/yY3Bw='; assertThrows(function() { dataFromString(StringFormat.BASE64URL, str); }, 'storage/invalid-format'); }); - it("Encodes base64 data URLs (including embedded content type and parameters) correctly", () => { + it('Encodes base64 data URLs (including embedded content type and parameters) correctly', () => { const str = 'data:image/png;param1=value;base64,aaaa'; const data = dataFromString(StringFormat.DATA_URL, str); - assertUint8ArrayEquals( - new Uint8Array([0x69, 0xA6, 0x9A]), data.data); + assertUint8ArrayEquals(new Uint8Array([0x69, 0xa6, 0x9a]), data.data); assert.equal(data.contentType, 'image/png;param1=value'); }); - it("Encodes non-base64 data URLs with no content type correctly", () => { + it('Encodes non-base64 data URLs with no content type correctly', () => { const str = 'data:,aaaa'; const data = dataFromString(StringFormat.DATA_URL, str); - assertUint8ArrayEquals( - new Uint8Array([0x61, 0x61, 0x61, 0x61]), data.data); + assertUint8ArrayEquals(new Uint8Array([0x61, 0x61, 0x61, 0x61]), data.data); assert.equal(data.contentType, null); }); - it("Encodes base64 data URLs with no content type correctly", () => { + it('Encodes base64 data URLs with no content type correctly', () => { const str = 'data:;base64,aaaa'; const data = dataFromString(StringFormat.DATA_URL, str); - assertUint8ArrayEquals( - new Uint8Array([0x69, 0xA6, 0x9A]), data.data); + assertUint8ArrayEquals(new Uint8Array([0x69, 0xa6, 0x9a]), data.data); assert.equal(data.contentType, null); }); - it("Encodes non-base64 data URLs with content type correctly", () => { + it('Encodes non-base64 data URLs with content type correctly', () => { const str = 'data:text/plain,arst'; const data = dataFromString(StringFormat.DATA_URL, str); - assertUint8ArrayEquals( - new Uint8Array([0x61, 0x72, 0x73, 
0x74]), data.data); + assertUint8ArrayEquals(new Uint8Array([0x61, 0x72, 0x73, 0x74]), data.data); assert.equal(data.contentType, 'text/plain'); }); - it("Encodes non-base64 data URLs with URL-encoded text correctly", () => { + it('Encodes non-base64 data URLs with URL-encoded text correctly', () => { const str = 'data:,a%20data'; const data = dataFromString(StringFormat.DATA_URL, str); assertUint8ArrayEquals( - new Uint8Array([0x61, 0x20, 0x64, 0x61, 0x74, 0x61]), data.data); + new Uint8Array([0x61, 0x20, 0x64, 0x61, 0x74, 0x61]), + data.data + ); }); - it("Encodes non-base64 data URLs with URL-encoded non-BMP codepoints correctly", () => { + it('Encodes non-base64 data URLs with URL-encoded non-BMP codepoints correctly', () => { const str = 'data:,%F0%9F%98%8A%E6%86%82%E9%AC%B1'; const data = dataFromString(StringFormat.DATA_URL, str); assertUint8ArrayEquals( - new Uint8Array( - [0xF0, 0x9F, 0x98, 0x8A, 0xE6, 0x86, 0x82, 0xE9, 0xAC, 0xB1]), - data.data); + new Uint8Array([ + 0xf0, + 0x9f, + 0x98, + 0x8a, + 0xe6, + 0x86, + 0x82, + 0xe9, + 0xac, + 0xb1 + ]), + data.data + ); }); - it("Rejects data URLs with invalid URL encodings", () => { + it('Rejects data URLs with invalid URL encodings', () => { const str = 'data:,%%0'; assertThrows(function() { dataFromString(StringFormat.DATA_URL, str); }, 'storage/invalid-format'); }); - it("Rejects data URLs with invalid URL-encoded byte sequences", () => { + it('Rejects data URLs with invalid URL-encoded byte sequences', () => { const str = 'data:,%80%80%80'; assertThrows(function() { dataFromString(StringFormat.DATA_URL, str); }, 'storage/invalid-format'); }); - it("Rejects data URLs with an invalid format", () => { + it('Rejects data URLs with an invalid format', () => { const str = 'dateeeep:,invalid'; assertThrows(function() { dataFromString(StringFormat.DATA_URL, str); diff --git a/tests/storage/browser/task_test.ts b/tests/storage/browser/task_test.ts index a0d429c82f3..ceefed03b96 100644 --- 
a/tests/storage/browser/task_test.ts +++ b/tests/storage/browser/task_test.ts @@ -13,22 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import {assert} from 'chai'; +import { assert } from 'chai'; import * as arrayUtils from '../../../src/storage/implementation/array'; -import {AuthWrapper} from '../../../src/storage/implementation/authwrapper'; -import {FbsBlob} from '../../../src/storage/implementation/blob'; -import {Location} from '../../../src/storage/implementation/location'; -import {getMappings} from '../../../src/storage/implementation/metadata'; -import {Unsubscribe} from '../../../src/storage/implementation/observer'; +import { AuthWrapper } from '../../../src/storage/implementation/authwrapper'; +import { FbsBlob } from '../../../src/storage/implementation/blob'; +import { Location } from '../../../src/storage/implementation/location'; +import { getMappings } from '../../../src/storage/implementation/metadata'; +import { Unsubscribe } from '../../../src/storage/implementation/observer'; import * as fbsPromise from '../../../src/storage/implementation/promise_external'; -import {makeRequest} from '../../../src/storage/implementation/request'; -import {TaskEvent, TaskState} from '../../../src/storage/implementation/taskenums'; -import {Headers} from '../../../src/storage/implementation/xhrio'; -import {Reference} from '../../../src/storage/reference'; -import {Service} from '../../../src/storage/service'; -import {UploadTask} from '../../../src/storage/task'; -import {assertThrows, bind as fbsBind, makePool} from './testshared'; -import {StringHeaders, TestingXhrIo} from './xhrio'; +import { makeRequest } from '../../../src/storage/implementation/request'; +import { + TaskEvent, + TaskState +} from '../../../src/storage/implementation/taskenums'; +import { Headers } from '../../../src/storage/implementation/xhrio'; +import { Reference } from '../../../src/storage/reference'; +import { 
Service } from '../../../src/storage/service'; +import { UploadTask } from '../../../src/storage/task'; +import { assertThrows, bind as fbsBind, makePool } from './testshared'; +import { StringHeaders, TestingXhrIo } from './xhrio'; const testLocation = new Location('bucket', 'object'); const smallBlob = new FbsBlob(new Blob(['a'])); @@ -39,102 +42,140 @@ const mappings = getMappings(); const fakeMetadata = '{ "downloadTokens": "a,b" }'; type Response = { - status: number, - body: string, - headers: StringHeaders + status: number; + body: string; + headers: StringHeaders; }; -type RequestHandler = (url: string, method: string, body?: ArrayBufferView|Blob|string, headers?: Headers) => Response; +type RequestHandler = ( + url: string, + method: string, + body?: ArrayBufferView | Blob | string, + headers?: Headers +) => Response; function authWrapperWithHandler(handler: RequestHandler): AuthWrapper { - function newSend(xhrio: TestingXhrIo, url: string, method: string, body?: ArrayBufferView|Blob|string, headers?: Headers): void { + function newSend( + xhrio: TestingXhrIo, + url: string, + method: string, + body?: ArrayBufferView | Blob | string, + headers?: Headers + ): void { const response = handler(url, method, body, headers); xhrio.simulateResponse(response.status, response.body, response.headers); } - return new AuthWrapper(null, (_1, _2) => { return {} as Reference; }, makeRequest, {} as Service, makePool(newSend)); + return new AuthWrapper( + null, + (_1, _2) => { + return {} as Reference; + }, + makeRequest, + {} as Service, + makePool(newSend) + ); } function fakeServerHandler(): RequestHandler { const stats: { [num: number]: { - currentSize: number, - finalSize: number - } + currentSize: number; + finalSize: number; + }; } = {}; let nextId: number = 0; - function statusHeaders(status: string, opt_existing?: StringHeaders): StringHeaders { + function statusHeaders( + status: string, + opt_existing?: StringHeaders + ): StringHeaders { if (opt_existing) { 
opt_existing['X-Goog-Upload-Status'] = status; return opt_existing; } else { - return {'X-Goog-Upload-Status': status}; + return { 'X-Goog-Upload-Status': status }; } } - function handler(url: string, method: string, content?: ArrayBufferView|Blob|string, headers?: Headers): Response { + function handler( + url: string, + method: string, + content?: ArrayBufferView | Blob | string, + headers?: Headers + ): Response { method = method || 'GET'; content = content || ''; headers = headers || {}; if (headers['X-Goog-Upload-Protocol'] === 'multipart') { - return {status: 200, body: fakeMetadata, headers: statusHeaders('final')}; + return { + status: 200, + body: fakeMetadata, + headers: statusHeaders('final') + }; } - const contentLength = (content as Blob).size || (content as string).length || 0; + const contentLength = + (content as Blob).size || (content as string).length || 0; if (headers['X-Goog-Upload-Protocol'] === 'resumable') { const thisId = nextId; nextId++; stats[thisId] = { currentSize: 0, - finalSize: +(headers['X-Goog-Upload-Header-Content-Length']), + finalSize: +headers['X-Goog-Upload-Header-Content-Length'] }; return { status: 200, body: '', - headers: statusHeaders( - 'active', {'X-Goog-Upload-URL': 'http://example.com?' + thisId}) + headers: statusHeaders('active', { + 'X-Goog-Upload-URL': 'http://example.com?' 
+ thisId + }) }; } const matches = url.match(/^http:\/\/example\.com\?([0-9]+)$/); if (matches === null) { - return {status: 400, body: '', headers: {}}; + return { status: 400, body: '', headers: {} }; } - const id = +(matches[1]); + const id = +matches[1]; if (!stats[id]) { - return {status: 400, body: 'Invalid upload id', headers: {}}; + return { status: 400, body: 'Invalid upload id', headers: {} }; } if (headers['X-Goog-Upload-Command'] === 'query') { return { status: 200, body: '', - headers: statusHeaders( - 'active', {'X-Goog-Upload-Size-Received': stats[id].currentSize.toString()}) + headers: statusHeaders('active', { + 'X-Goog-Upload-Size-Received': stats[id].currentSize.toString() + }) }; } - const commands = (headers['X-Goog-Upload-Command'] as string).split(',').map(str => { return str.trim(); }); + const commands = (headers['X-Goog-Upload-Command'] as string) + .split(',') + .map(str => { + return str.trim(); + }); const isUpload = arrayUtils.contains(commands, 'upload'); const isFinalize = arrayUtils.contains(commands, 'finalize'); const stat = stats[id]; if (isUpload) { - const offset = +(headers['X-Goog-Upload-Offset']); + const offset = +headers['X-Goog-Upload-Offset']; if (offset !== stat.currentSize) { - return {status: 400, body: 'Uploading at wrong offset', headers: {}}; + return { status: 400, body: 'Uploading at wrong offset', headers: {} }; } stat.currentSize += contentLength; if (stat.currentSize > stat.finalSize) { - return {status: 400, body: 'Too many bytes', headers: {}}; + return { status: 400, body: 'Too many bytes', headers: {} }; } else if (!isFinalize) { - return {status: 200, body: '', headers: statusHeaders('active')}; + return { status: 200, body: '', headers: statusHeaders('active') }; } } @@ -155,45 +196,77 @@ function fakeServerHandler(): RequestHandler { } } - return {status: 400, body: '', headers: {}}; + return { status: 400, body: '', headers: {} }; } return handler; } -describe("Firebase Storage > Upload Task", () => { 
- it("Works for a small upload w/ an observer", () => { +describe('Firebase Storage > Upload Task', () => { + it('Works for a small upload w/ an observer', () => { const authWrapper = authWrapperWithHandler(fakeServerHandler()); const task = new UploadTask( - {} as Reference, authWrapper, testLocation, mappings, smallBlob); - return fbsPromise.make((resolve, reject) => { - task.on( - TaskEvent.STATE_CHANGED, null, - error => { assert.fail('Unexpected upload failure'); }, - () => { resolve(null); }); + {} as Reference, + authWrapper, + testLocation, + mappings, + smallBlob + ); + return fbsPromise.make((resolve, reject) => { + task.on( + TaskEvent.STATE_CHANGED, + null, + error => { + assert.fail('Unexpected upload failure'); + }, + () => { + resolve(null); + } + ); }); }); - it("Works for a small upload w/ a promise", () => { + it('Works for a small upload w/ a promise', () => { const authWrapper = authWrapperWithHandler(fakeServerHandler()); const task = new UploadTask( - {} as Reference, authWrapper, testLocation, mappings, smallBlob); + {} as Reference, + authWrapper, + testLocation, + mappings, + smallBlob + ); return task.then(snapshot => { assert.equal(snapshot.totalBytes, smallBlob.size()); }); }); - it("Works for a small upload canceled w/ a promise", () => { + it('Works for a small upload canceled w/ a promise', () => { const authWrapper = authWrapperWithHandler(fakeServerHandler()); const task = new UploadTask( - {} as Reference, authWrapper, testLocation, mappings, smallBlob); + {} as Reference, + authWrapper, + testLocation, + mappings, + smallBlob + ); const promise: Promise = task.then( - snapshot => { assert.fail('task completed, but should have failed'); return null; }, - err => { return 'Task failed as expected'; }); + snapshot => { + assert.fail('task completed, but should have failed'); + return null; + }, + err => { + return 'Task failed as expected'; + } + ); task.cancel(); return promise; }); - it("Works properly with multiple observers", () 
=> { + it('Works properly with multiple observers', () => { const authWrapper = authWrapperWithHandler(fakeServerHandler()); const task = new UploadTask( - {} as Reference, authWrapper, testLocation, mappings, smallBlob); + {} as Reference, + authWrapper, + testLocation, + mappings, + smallBlob + ); let badComplete = false; const h1: Unsubscribe = task.on(TaskEvent.STATE_CHANGED, null, null, () => { @@ -208,13 +281,20 @@ describe("Firebase Storage > Upload Task", () => { // This one will get executed immediately const h3: Unsubscribe = (() => { let lastState; - return task.on(TaskEvent.STATE_CHANGED, snapshot => { - if (lastState !== TaskState.RUNNING && - snapshot.state === TaskState.RUNNING) { - resumed++; - } - lastState = snapshot.state; - }, null, null); + return task.on( + TaskEvent.STATE_CHANGED, + snapshot => { + if ( + lastState !== TaskState.RUNNING && + snapshot.state === TaskState.RUNNING + ) { + resumed++; + } + lastState = snapshot.state; + }, + null, + null + ); })() as Unsubscribe; h1(); @@ -222,32 +302,49 @@ describe("Firebase Storage > Upload Task", () => { return fbsPromise.make((resolve, reject) => { task.on( - TaskEvent.STATE_CHANGED, null, - error => { assert.fail('Upload failed'); }, - function() { - assert.isFalse(badComplete); - assert.equal(resumed, 1); - resolve(null); - }); + TaskEvent.STATE_CHANGED, + null, + error => { + assert.fail('Upload failed'); + }, + function() { + assert.isFalse(badComplete); + assert.equal(resumed, 1); + resolve(null); + } + ); }); }); it("Works properly with an observer missing the 'next' method", () => { const authWrapper = authWrapperWithHandler(fakeServerHandler()); const task = new UploadTask( - {} as Reference, authWrapper, testLocation, mappings, smallBlob); + {} as Reference, + authWrapper, + testLocation, + mappings, + smallBlob + ); return fbsPromise.make((resolve, reject) => { - task.on( - TaskEvent.STATE_CHANGED, { - error: err => { assert.fail('Unexpected upload failure'); }, - complete: () => { 
resolve(null); } - }); + task.on(TaskEvent.STATE_CHANGED, { + error: err => { + assert.fail('Unexpected upload failure'); + }, + complete: () => { + resolve(null); + } }); + }); }); function runNormalUploadTest(blob: FbsBlob): Promise { const authWrapper = authWrapperWithHandler(fakeServerHandler()); const task = new UploadTask( - {} as Reference, authWrapper, testLocation, mappings, blob); + {} as Reference, + authWrapper, + testLocation, + mappings, + blob + ); let resolve, reject; const promise = fbsPromise.make(function(innerResolve, innerReject) { @@ -283,32 +380,33 @@ describe("Firebase Storage > Upload Task", () => { function addCallbacks(task) { let lastState; task.on( - TaskEvent.STATE_CHANGED, - snapshot => { - fixedAssertEquals(complete, 0); - - const state = snapshot.state; - if (lastState !== TaskState.RUNNING && - state === TaskState.RUNNING) { - events.push('resume'); - } else if ( - lastState !== TaskState.PAUSED && - state === TaskState.PAUSED) { - events.push('pause'); - } - - const p = [snapshot.bytesTransferred, snapshot.totalBytes]; - progress.push(p); - - lastState = state; - }, - error => { - fixedAssertFail('upload failed'); - }, - () => { - events.push('complete'); - complete++; - }); + TaskEvent.STATE_CHANGED, + snapshot => { + fixedAssertEquals(complete, 0); + + const state = snapshot.state; + if (lastState !== TaskState.RUNNING && state === TaskState.RUNNING) { + events.push('resume'); + } else if ( + lastState !== TaskState.PAUSED && + state === TaskState.PAUSED + ) { + events.push('pause'); + } + + const p = [snapshot.bytesTransferred, snapshot.totalBytes]; + progress.push(p); + + lastState = state; + }, + error => { + fixedAssertFail('upload failed'); + }, + () => { + events.push('complete'); + complete++; + } + ); } addCallbacks(task); @@ -316,8 +414,7 @@ describe("Firebase Storage > Upload Task", () => { let lastState; task.on(TaskEvent.STATE_CHANGED, function(snapshot) { const state = snapshot.state; - if (lastState !== 
TaskState.PAUSED && - state === TaskState.PAUSED) { + if (lastState !== TaskState.PAUSED && state === TaskState.PAUSED) { events.push('timeout'); setTimeout(function() { task.resume(); @@ -347,7 +444,7 @@ describe("Firebase Storage > Upload Task", () => { for (let i = 0; i < progress.length - 1; i++) { increasing = increasing && progress[i][0] <= progress[i + 1][0]; allTotalsTheSame = - allTotalsTheSame && progress[i][1] === progress[i + 1][1]; + allTotalsTheSame && progress[i][1] === progress[i + 1][1]; } let lastIsAll = false; @@ -363,33 +460,44 @@ describe("Firebase Storage > Upload Task", () => { fixedAssertTrue(lastIsAll); const task2 = new UploadTask( - {} as Reference, authWrapper, testLocation, mappings, blob); + {} as Reference, + authWrapper, + testLocation, + mappings, + blob + ); const events2 = []; (function() { let lastState; task2.on( - TaskEvent.STATE_CHANGED, - snapshot => { - const state = snapshot.state; - if (lastState !== TaskState.RUNNING && - state === TaskState.RUNNING) { - events2.push('resume'); - } else if ( - lastState !== TaskState.PAUSED && - state === TaskState.PAUSED) { - events2.push('pause'); - } - lastState = state; - }, error => { - events2.push('failure'); - fixedAssertEquals(events2.length, 2); - fixedAssertEquals(events2[0], 'resume'); - fixedAssertEquals(events2[1], 'failure'); - resolve(null); - }, () => { - fixedAssertFail('Completed when we should have canceled'); - }); + TaskEvent.STATE_CHANGED, + snapshot => { + const state = snapshot.state; + if ( + lastState !== TaskState.RUNNING && + state === TaskState.RUNNING + ) { + events2.push('resume'); + } else if ( + lastState !== TaskState.PAUSED && + state === TaskState.PAUSED + ) { + events2.push('pause'); + } + lastState = state; + }, + error => { + events2.push('failure'); + fixedAssertEquals(events2.length, 2); + fixedAssertEquals(events2[0], 'resume'); + fixedAssertEquals(events2[1], 'failure'); + resolve(null); + }, + () => { + fixedAssertFail('Completed when we should 
have canceled'); + } + ); })(); task2.cancel(); }); @@ -399,87 +507,105 @@ describe("Firebase Storage > Upload Task", () => { return promise; } - it("Calls callback sequences for small uploads correctly", () => { + it('Calls callback sequences for small uploads correctly', () => { return runNormalUploadTest(smallBlob); }); - it("Calls callback sequences for big uploads correctly", () => { + it('Calls callback sequences for big uploads correctly', () => { return runNormalUploadTest(bigBlob); }); - describe("Argument verification", () => { + describe('Argument verification', () => { const authWrapper = authWrapperWithHandler(fakeServerHandler()); const task = new UploadTask( - {} as Reference, authWrapper, testLocation, mappings, smallBlob); - describe("on", () => { - it("Throws on no args", () => { - assertThrows( - fbsBind(task.on, task), 'storage/invalid-argument-count'); - }); - it("Throws on 5 args", () => { - assertThrows(fbsBind( - task.on, task, TaskEvent.STATE_CHANGED, null, null, null, 1), - 'storage/invalid-argument-count'); + {} as Reference, + authWrapper, + testLocation, + mappings, + smallBlob + ); + describe('on', () => { + it('Throws on no args', () => { + assertThrows(fbsBind(task.on, task), 'storage/invalid-argument-count'); }); - it("Throws on a single string arg", () => { + it('Throws on 5 args', () => { assertThrows( - fbsBind(task.on, task, '3'), 'storage/invalid-argument'); + fbsBind(task.on, task, TaskEvent.STATE_CHANGED, null, null, null, 1), + 'storage/invalid-argument-count' + ); }); - it("Throws on a single null arg", () => { - assertThrows( - fbsBind(task.on, task, null), 'storage/invalid-argument'); + it('Throws on a single string arg', () => { + assertThrows(fbsBind(task.on, task, '3'), 'storage/invalid-argument'); + }); + it('Throws on a single null arg', () => { + assertThrows(fbsBind(task.on, task, null), 'storage/invalid-argument'); }); - it("Throws on a number arg instead of a function", () => { + it('Throws on a number arg 
instead of a function', () => { assertThrows( - fbsBind(task.on, task, TaskEvent.STATE_CHANGED, null, null, 3), - 'storage/invalid-argument'); + fbsBind(task.on, task, TaskEvent.STATE_CHANGED, null, null, 3), + 'storage/invalid-argument' + ); }); - it("Throws on an empty object arg", () => { + it('Throws on an empty object arg', () => { assertThrows( - fbsBind(task.on, task, TaskEvent.STATE_CHANGED, {}), - 'storage/invalid-argument'); + fbsBind(task.on, task, TaskEvent.STATE_CHANGED, {}), + 'storage/invalid-argument' + ); }); }); - describe("subscribe returned from on", () => { - it("Throws on no args", () => { - assertThrows(fbsBind(task.on(TaskEvent.STATE_CHANGED), null), - 'storage/invalid-argument-count'); + describe('subscribe returned from on', () => { + it('Throws on no args', () => { + assertThrows( + fbsBind(task.on(TaskEvent.STATE_CHANGED), null), + 'storage/invalid-argument-count' + ); }); - it("Throws on 4 args", () => { - assertThrows(fbsBind(task.on(TaskEvent.STATE_CHANGED), null, null, null, null, 1), - 'storage/invalid-argument-count'); - + it('Throws on 4 args', () => { + assertThrows( + fbsBind(task.on(TaskEvent.STATE_CHANGED), null, null, null, null, 1), + 'storage/invalid-argument-count' + ); }); - it("Throws number arg instead of function", () => { + it('Throws number arg instead of function', () => { assertThrows( - fbsBind(task.on(TaskEvent.STATE_CHANGED), null, null, null, 3), - 'storage/invalid-argument'); + fbsBind(task.on(TaskEvent.STATE_CHANGED), null, null, null, 3), + 'storage/invalid-argument' + ); }); - it("Throws on an empty object arg", () => { + it('Throws on an empty object arg', () => { assertThrows( - fbsBind(task.on(TaskEvent.STATE_CHANGED), null, {}), - 'storage/invalid-argument'); + fbsBind(task.on(TaskEvent.STATE_CHANGED), null, {}), + 'storage/invalid-argument' + ); }); - it("Throws on a single null arg", () => { + it('Throws on a single null arg', () => { assertThrows( - fbsBind(task.on(TaskEvent.STATE_CHANGED), null, 
null), - 'storage/invalid-argument'); + fbsBind(task.on(TaskEvent.STATE_CHANGED), null, null), + 'storage/invalid-argument' + ); }); }); - describe("resume", () => { - it("Throws on a number", () => { - assertThrows(fbsBind(task.resume, task, 3), 'storage/invalid-argument-count'); + describe('resume', () => { + it('Throws on a number', () => { + assertThrows( + fbsBind(task.resume, task, 3), + 'storage/invalid-argument-count' + ); }); }); - describe("pause", () => { - it("Throws on a number", () => { + describe('pause', () => { + it('Throws on a number', () => { assertThrows( - fbsBind(task.pause, task, 3), 'storage/invalid-argument-count'); + fbsBind(task.pause, task, 3), + 'storage/invalid-argument-count' + ); }); }); - describe("cancel", () => { - it("Throws on a number", () => { + describe('cancel', () => { + it('Throws on a number', () => { assertThrows( - fbsBind(task.cancel, task, 3), 'storage/invalid-argument-count'); + fbsBind(task.cancel, task, 3), + 'storage/invalid-argument-count' + ); }); }); }); diff --git a/tests/storage/browser/testshared.ts b/tests/storage/browser/testshared.ts index 6b2f7ede87d..cfc44e4d38a 100644 --- a/tests/storage/browser/testshared.ts +++ b/tests/storage/browser/testshared.ts @@ -13,33 +13,41 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {assert} from 'chai'; -import {FirebaseApp} from '../../../src/app/firebase_app'; +import { assert } from 'chai'; +import { FirebaseApp } from '../../../src/app/firebase_app'; import * as constants from '../../../src/storage/implementation/constants'; -import {Code, FirebaseStorageError} from '../../../src/storage/implementation/error'; +import { + Code, + FirebaseStorageError +} from '../../../src/storage/implementation/error'; import * as objectUtils from '../../../src/storage/implementation/object'; import * as promiseimpl from '../../../src/storage/implementation/promise_external'; import * as type from '../../../src/storage/implementation/type'; -import {Headers, XhrIo} from '../../../src/storage/implementation/xhrio'; -import {XhrIoPool} from '../../../src/storage/implementation/xhriopool'; -import {SendHook, StringHeaders, TestingXhrIo} from './xhrio'; +import { Headers, XhrIo } from '../../../src/storage/implementation/xhrio'; +import { XhrIoPool } from '../../../src/storage/implementation/xhriopool'; +import { SendHook, StringHeaders, TestingXhrIo } from './xhrio'; export const authToken = 'totally-legit-auth-token'; export const bucket = 'mybucket'; -export const fakeApp = makeFakeApp({'accessToken': authToken}); +export const fakeApp = makeFakeApp({ accessToken: authToken }); export const fakeAppNoAuth = makeFakeApp(null); -export function makeFakeApp(token: Object|null, bucket_arg?: string): FirebaseApp { +export function makeFakeApp( + token: Object | null, + bucket_arg?: string +): FirebaseApp { const app: any = {}; app.INTERNAL = {}; - app.INTERNAL.getToken = function() { return promiseimpl.resolve(token); }; + app.INTERNAL.getToken = function() { + return promiseimpl.resolve(token); + }; app.options = {}; if (type.isDef(bucket_arg)) { app.options[constants.configOption] = bucket_arg; } else { app.options[constants.configOption] = bucket; } - return (app as FirebaseApp); + return app as FirebaseApp; } export function makePool(sendHook: 
SendHook): XhrIoPool { @@ -48,7 +56,7 @@ export function makePool(sendHook: SendHook): XhrIoPool { return new TestingXhrIo(sendHook); } }; - return (pool as XhrIoPool); + return pool as XhrIoPool; } /** @@ -57,7 +65,7 @@ export function makePool(sendHook: SendHook): XhrIoPool { */ export function fakeXhrIo(headers: Headers, status: number = 200): XhrIo { const lower: StringHeaders = {}; - objectUtils.forEach(headers, function(key: string, val: string|number) { + objectUtils.forEach(headers, function(key: string, val: string | number) { lower[key.toLowerCase()] = val.toString(); }); @@ -75,8 +83,8 @@ export function fakeXhrIo(headers: Headers, status: number = 200): XhrIo { } }; - return (fakeXhrIo as XhrIo); -}; + return fakeXhrIo as XhrIo; +} /** * Binds ignoring types. Used to test calls involving improper arguments. @@ -85,10 +93,10 @@ export function bind(f: Function, ctx: any, ...args: any[]): () => void { return () => { f.apply(ctx, args); }; -}; +} export function assertThrows(f: () => void, code: Code): FirebaseStorageError { - let captured: FirebaseStorageError|null = null; + let captured: FirebaseStorageError | null = null; assert.throws(() => { try { f(); @@ -109,7 +117,10 @@ export function assertUint8ArrayEquals(arr1: Uint8Array, arr2: Uint8Array) { } } -export function assertObjectIncludes(included: {[name: string]: any}, obj: {[name: string]: any}): void { +export function assertObjectIncludes( + included: { [name: string]: any }, + obj: { [name: string]: any } +): void { objectUtils.forEach(included, function(key, val) { assert.deepEqual(val, obj[key]); }); diff --git a/tests/storage/browser/xhrio.ts b/tests/storage/browser/xhrio.ts index ee40638991c..50c02e5feb8 100644 --- a/tests/storage/browser/xhrio.ts +++ b/tests/storage/browser/xhrio.ts @@ -13,12 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import {forEach} from '../../../src/storage/implementation/object'; +import { forEach } from '../../../src/storage/implementation/object'; import * as promiseimpl from '../../../src/storage/implementation/promise_external'; import * as type from '../../../src/storage/implementation/type'; -import {ErrorCode, Headers, XhrIo} from '../../../src/storage/implementation/xhrio'; - -export type SendHook = (xhrio: TestingXhrIo, url: string, method: string, body?: ArrayBufferView|Blob|string|null, headers?: Headers) => void; +import { + ErrorCode, + Headers, + XhrIo +} from '../../../src/storage/implementation/xhrio'; + +export type SendHook = ( + xhrio: TestingXhrIo, + url: string, + method: string, + body?: ArrayBufferView | Blob | string | null, + headers?: Headers +) => void; export enum State { START = 0, @@ -26,10 +36,9 @@ export enum State { DONE = 2 } -export type StringHeaders = {[name: string]: string}; +export type StringHeaders = { [name: string]: string }; export class TestingXhrIo implements XhrIo { - private state: State; private sendPromise: Promise; private resolve: (XhrIo) => void; @@ -41,8 +50,9 @@ export class TestingXhrIo implements XhrIo { constructor(sendHook: SendHook) { this.state = State.START; - this.sendPromise = - this.sendPromise = promiseimpl.make((resolve, reject) => { + this.sendPromise = this.sendPromise = promiseimpl.make< + XhrIo + >((resolve, reject) => { this.resolve = resolve; }); this.sendHook = sendHook; @@ -52,9 +62,14 @@ export class TestingXhrIo implements XhrIo { this.errorCode = ErrorCode.NO_ERROR; } - send(url: string, method: string, body?: ArrayBufferView|Blob|string|null, headers?: Headers): Promise { + send( + url: string, + method: string, + body?: ArrayBufferView | Blob | string | null, + headers?: Headers + ): Promise { if (this.state !== State.START) { - throw new Error('Can\'t send again'); + throw new Error("Can't send again"); } this.state = State.SENT; @@ -67,13 +82,13 @@ export class TestingXhrIo implements 
XhrIo { simulateResponse(status: number, body: string, headers: Headers) { if (this.state !== State.SENT) { - throw new Error('Can\'t simulate response before send/more than once'); + throw new Error("Can't simulate response before send/more than once"); } this.status = status; this.responseText = body; this.headers = {}; - forEach(headers, (key: string, val: string|number) => { + forEach(headers, (key: string, val: string | number) => { this.headers[key.toLowerCase()] = val.toString(); }); this.errorCode = ErrorCode.NO_ERROR; diff --git a/tests/utils/deep_copy.test.ts b/tests/utils/deep_copy.test.ts index 1c6425caed9..1d2491f3ab7 100644 --- a/tests/utils/deep_copy.test.ts +++ b/tests/utils/deep_copy.test.ts @@ -13,82 +13,93 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import {assert} from 'chai'; -import {deepCopy, deepExtend} from '../../src/utils/deep_copy'; +import { assert } from 'chai'; +import { deepCopy, deepExtend } from '../../src/utils/deep_copy'; -describe("deepCopy()", () => { - it("Scalars", () => { +describe('deepCopy()', () => { + it('Scalars', () => { assert.strictEqual(deepCopy(true), true); assert.strictEqual(deepCopy(123), 123); assert.strictEqual(deepCopy('abc'), 'abc'); }); - it("Date", () => { + it('Date', () => { let d = new Date(); assert.deepEqual(deepCopy(d), d); }); - it("Object", () => { + it('Object', () => { assert.deepEqual(deepCopy({}), {}); - assert.deepEqual(deepCopy({a: 123}), {a: 123}); - assert.deepEqual(deepCopy({a: {b: 123}}), {a: {b: 123}}); + assert.deepEqual(deepCopy({ a: 123 }), { a: 123 }); + assert.deepEqual(deepCopy({ a: { b: 123 } }), { a: { b: 123 } }); }); - it("Array", () => { + it('Array', () => { assert.deepEqual(deepCopy([]), []); assert.deepEqual(deepCopy([123, 456]), [123, 456]); assert.deepEqual(deepCopy([123, [456]]), [123, [456]]); }); }); -describe("deepExtend", () => { - it("Scalars", () => { +describe('deepExtend', () => { + it('Scalars', 
() => { assert.strictEqual(deepExtend(1, true), true); assert.strictEqual(deepExtend(undefined, 123), 123); assert.strictEqual(deepExtend('was', 'abc'), 'abc'); }); - it("Date", () => { + it('Date', () => { let d = new Date(); assert.deepEqual(deepExtend(new Date(), d), d); }); - it("Object", () => { - assert.deepEqual(deepExtend({old: 123}, {}), {old: 123}); - assert.deepEqual(deepExtend({old: 123}, {s: 'hello'}), - {old: 123, s: 'hello'}); - assert.deepEqual(deepExtend({old: 123, a: {c: 'in-old'}}, - {a: {b: 123}}), - {old: 123, a: {b: 123, c: 'in-old'}}); + it('Object', () => { + assert.deepEqual(deepExtend({ old: 123 }, {}), { old: 123 }); + assert.deepEqual(deepExtend({ old: 123 }, { s: 'hello' }), { + old: 123, + s: 'hello' + }); + assert.deepEqual( + deepExtend({ old: 123, a: { c: 'in-old' } }, { a: { b: 123 } }), + { old: 123, a: { b: 123, c: 'in-old' } } + ); }); - it("Array", () => { + it('Array', () => { assert.deepEqual(deepExtend([1], []), []); assert.deepEqual(deepExtend([1], [123, 456]), [123, 456]); assert.deepEqual(deepExtend([1], [123, [456]]), [123, [456]]); }); - it("Array is copied - not referenced", () => { - let o1 = {a: [1]}; - let o2 = {a: [2]}; + it('Array is copied - not referenced', () => { + let o1 = { a: [1] }; + let o2 = { a: [2] }; - assert.deepEqual(deepExtend(o1, o2), {a: [2]}); + assert.deepEqual(deepExtend(o1, o2), { a: [2] }); o2.a.push(3); - assert.deepEqual(o1, {a: [2]}); + assert.deepEqual(o1, { a: [2] }); }); - it("Array with undefined elements", () => { + it('Array with undefined elements', () => { let a: any = []; - a[3] = "3"; + a[3] = '3'; let b = deepExtend(undefined, a); - assert.deepEqual(b, [,,,"3"]); + assert.deepEqual(b, [, , , '3']); }); - it("Function", () => { - let source:any = () => {/*_*/}; - let target:any = deepExtend({a: () => {/*_*/}}, - {a: source}); - assert.deepEqual({a: source}, target); + it('Function', () => { + let source: any = () => { + /*_*/ + }; + let target: any = deepExtend( + { + a: () => { 
+ /*_*/ + } + }, + { a: source } + ); + assert.deepEqual({ a: source }, target); assert.strictEqual(source, target.a); }); });